Dataset schema: one line per column, giving the column name, its dtype, and the reported value range (min to max) or number of distinct classes.

hexsha: stringlengths, 40 to 40
size: int64, 2 to 1.02M
ext: stringclasses, 10 values
lang: stringclasses, 1 value
max_stars_repo_path: stringlengths, 4 to 245
max_stars_repo_name: stringlengths, 6 to 130
max_stars_repo_head_hexsha: stringlengths, 40 to 40
max_stars_repo_licenses: listlengths, 1 to 10
max_stars_count: int64, 1 to 191k
max_stars_repo_stars_event_min_datetime: stringlengths, 24 to 24
max_stars_repo_stars_event_max_datetime: stringlengths, 24 to 24
max_issues_repo_path: stringlengths, 4 to 245
max_issues_repo_name: stringlengths, 6 to 130
max_issues_repo_head_hexsha: stringlengths, 40 to 40
max_issues_repo_licenses: listlengths, 1 to 10
max_issues_count: int64, 1 to 67k
max_issues_repo_issues_event_min_datetime: stringlengths, 24 to 24
max_issues_repo_issues_event_max_datetime: stringlengths, 24 to 24
max_forks_repo_path: stringlengths, 4 to 245
max_forks_repo_name: stringlengths, 6 to 130
max_forks_repo_head_hexsha: stringlengths, 40 to 40
max_forks_repo_licenses: listlengths, 1 to 10
max_forks_count: int64, 1 to 105k
max_forks_repo_forks_event_min_datetime: stringlengths, 24 to 24
max_forks_repo_forks_event_max_datetime: stringlengths, 24 to 24
content: stringlengths, 2 to 1.02M
avg_line_length: float64, 1 to 958k
max_line_length: int64, 1 to 987k
alphanum_fraction: float64, 0 to 1
content_no_comment: stringlengths, 0 to 1.01M
is_comment_constant_removed: bool, 2 classes
is_sharp_comment_removed: bool, 1 class
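Each row below is one source file from a public repository, carrying repo metadata (paths, licenses, star/issue/fork counts and event times), the raw file content, a comment-stripped variant, and a few derived statistics. As a minimal, hypothetical sketch of how rows with exactly these columns could be loaded and filtered, assuming the dump lives in local Parquet files and that the Hugging Face `datasets` library is used (both the file path and the loading mechanism are assumptions, not something stated in this dump):

```python
# A minimal sketch, assuming the rows shown here sit in local parquet files.
# The "data/*.parquet" path is a placeholder; this excerpt does not name its source.
from datasets import load_dataset

ds = load_dataset("parquet", data_files="data/*.parquet", split="train")

# Keep small Python files with at least one star, using only columns that
# appear in the schema above. Star counts can be null, hence the `or 0`.
subset = ds.filter(
    lambda row: row["lang"] == "Python"
    and row["size"] < 50_000
    and (row["max_stars_count"] or 0) >= 1
)

for row in subset.select(range(min(3, len(subset)))):
    print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
```

Any other column from the schema (for example `alphanum_fraction` or `max_line_length`) can be used in the same way inside the filter.
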
hexsha: f70108704eabc1b138d4f8106e24514584746298
size: 10,157
ext: py
lang: Python
max_stars_repo_path: NeuroEvolution/evolution.py
max_stars_repo_name: berkott/SpaceInvadersAI
max_stars_repo_head_hexsha: 0d1d095f60b06f09b337bd3abf7bb46a08a8ed70
max_stars_repo_licenses: [ "Apache-2.0" ]
max_stars_count: 4
max_stars_repo_stars_event_min_datetime: 2019-04-12T09:46:58.000Z
max_stars_repo_stars_event_max_datetime: 2021-08-09T16:30:26.000Z
max_issues_repo_path: NeuroEvolution/evolution.py
max_issues_repo_name: berkott/SpaceInvadersAI
max_issues_repo_head_hexsha: 0d1d095f60b06f09b337bd3abf7bb46a08a8ed70
max_issues_repo_licenses: [ "Apache-2.0" ]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2018-08-13T23:00:46.000Z
max_issues_repo_issues_event_max_datetime: 2018-08-13T23:00:46.000Z
max_forks_repo_path: NeuroEvolution/evolution.py
max_forks_repo_name: berkott/SpaceInvadersAI
max_forks_repo_head_hexsha: 0d1d095f60b06f09b337bd3abf7bb46a08a8ed70
max_forks_repo_licenses: [ "Apache-2.0" ]
max_forks_count: 2
max_forks_repo_forks_event_min_datetime: 2019-12-28T08:50:55.000Z
max_forks_repo_forks_event_max_datetime: 2021-11-26T02:09:37.000Z
content:
import gym import keras as k from keras.models import Sequential from keras.layers import Conv2D, Activation, MaxPooling2D, Flatten, Dense, Dropout from keras.optimizers import Adam import numpy as np from datetime import datetime from matplotlib import pyplot as PLT import time import csv import os # You can adjust these hyperparameters POPULATION_SIZE = 50 L1=20 L2=10 L3=50 L4=4 # L1=2 # L2=3 # L3=4 # L4=5 POOLING_SIZE = (2,2) FILTER_SIZE_1 = (3,3) FILTER_SIZE_2 = (5,5) ELITE_SET_SIZE = 5 MUTATION_RATE = 0.5 FRAME_SIZE = 210*160*1 INPUT_DIM = 2*FRAME_SIZE INPUT_SHAPE = (210, 160, 2) FINAL_DIMENSION_X = int(((INPUT_SHAPE[0] - 2*int(FILTER_SIZE_1[0]/2))/2 - 2*int(FILTER_SIZE_2[0]/2))/2) FINAL_DIMENSION_Y = int(((INPUT_SHAPE[1] - 2*int(FILTER_SIZE_1[0]/2))/2 - 2*int(FILTER_SIZE_2[0]/2))/2) env = gym.make('SpaceInvaders-v0') keepTraining = True slack_logs = np.zeros((6,1)) def visualize(featureVector): regularImage = featureVector[0,:FRAME_SIZE].reshape((210,160)) differenceImage = featureVector[0,FRAME_SIZE:].reshape((210,160)) PLT.imshow(regularImage) PLT.show() PLT.imshow(differenceImage) PLT.show() def writeCsv(index, data): slack_logs[index] = data # For slack_logs: # [0] Generation # [1] Highest Score # [2] Current Score # [3] Games Played # [4] Start Time # [5] All Time High Score with open("logs.csv", "w", newline='') as csv_file: writer = csv.writer(csv_file, delimiter=',') writer.writerows(slack_logs) def calculatePolicySize(): # INPUT_DIM * L1+L1+L1 * L2+L2+L2 * L3+L3+L3 * L4+L4 # FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 + L1 + # FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 + L2 + # final_dimension_x*final_dimension_y*L2*L3 + L3 + # L3*L4 return FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 + L1 + FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 + L2 + FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3 + L3 + L3 * L4 + L4 # This function is called each time a new memeber of the population is created def initPopulation(): population = np.random.rand(POPULATION_SIZE, calculatePolicySize()) population = population*2-1 return population def convert_prediction_to_action(prediction): index = np.argmax(prediction[0]) # NOOP if (index == 0): return 0 # FIRE elif (index == 1): return 1 # RIGHT elif (index == 2): return 3 # LEFT elif (index == 3): return 4 return 0 def playGame(model): score=0 done=False action=0 frame = np.zeros((1,FRAME_SIZE)) previous_frame = np.zeros((1,FRAME_SIZE)) env.reset() observation_dim = list(INPUT_SHAPE) observation_dim.insert(0,1) observation_dim = tuple(observation_dim) while not done: env.render() observation, reward, done, _ = env.step(action) frame = np.reshape(observation[:,:,0],(1,FRAME_SIZE)) frame = np.where(frame > 0, 1.0,0) difference = frame-previous_frame final_observation=np.zeros((1,INPUT_DIM)) final_observation[0,:FRAME_SIZE]=frame final_observation[0,FRAME_SIZE:]=difference final_observation = np.reshape(final_observation, observation_dim) prediction = model.predict(final_observation) action = convert_prediction_to_action(prediction) score+=reward writeCsv(2, score) previous_frame = np.copy(frame) # print("Score:",score) return score # This is where the weights are put into the neural net to see how well it goes def evaluate(dnnmodel, population, gamesPlayed): scores=np.zeros(POPULATION_SIZE) for i in range(POPULATION_SIZE): nnFormatPolicyVector = applyPolicyVectorToNN(population[i]) dnnmodel.set_weights(nnFormatPolicyVector) scores[i] = playGame(dnnmodel) gamesPlayed+=1 writeCsv(3, gamesPlayed) return scores # Constructs the 
model that is to be used def buildModel(): model = Sequential() # layer1=Dense(L1, activation = 'relu', input_dim = INPUT_DIM, kernel_initializer='uniform') layer1=Conv2D(L1, FILTER_SIZE_1, activation='relu', input_shape = INPUT_SHAPE, kernel_initializer='uniform') model.add(layer1) model.add(MaxPooling2D(pool_size=POOLING_SIZE)) layer2=Conv2D(L2, FILTER_SIZE_2, activation='relu', kernel_initializer='uniform') model.add(layer2) model.add(MaxPooling2D(pool_size=POOLING_SIZE)) # model.add(Dropout(0.25)) model.add(Flatten()) layer3=Dense(L3, activation = 'relu', kernel_initializer='uniform') model.add(layer3) layer4=Dense(L4, activation ='softmax', kernel_initializer='uniform') model.add(layer4) adam = Adam(lr=0.01) model.compile(loss='mean_squared_error', optimizer=adam) weights=model.get_weights() print(len(weights)) print("====================================") return model def applyPolicyVectorToNN(policyVector): # INPUT_DIM * L1+L1+L1 * L2+L2+L2 * L3+L3+L3 * L4+L4 # FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 + L1 + # FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 + L2 + # final_dimension_x*final_dimension_y*L2*L3 + L3 + # L3*L4 offset=FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 sec1 = policyVector[:offset].reshape(FILTER_SIZE_1[0], FILTER_SIZE_1[1], INPUT_SHAPE[2], L1) sec2 = policyVector[offset:offset+L1] offset+=L1 sec3 = policyVector[offset:offset+FILTER_SIZE_2[0] * FILTER_SIZE_2[1] * L1 * L2].reshape(FILTER_SIZE_2[0], FILTER_SIZE_2[1], L1, L2) offset+=FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 sec4 = policyVector[offset:offset+L2] offset+=L2 sec5 = policyVector[offset:offset+FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3].reshape(FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2, L3) offset+=FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3 sec6 = policyVector[offset:offset+L3] offset+=L3 sec7 = policyVector[offset:offset+L3*L4].reshape(L3, L4) offset+=L3*L4 sec8 = policyVector[offset:] nnFormat = [] nnFormat.append(sec1) nnFormat.append(sec2) nnFormat.append(sec3) nnFormat.append(sec4) nnFormat.append(sec5) nnFormat.append(sec6) nnFormat.append(sec7) nnFormat.append(sec8) return nnFormat # This is where the members of the population are ranked def selection(scores, population): eliteSet = np.zeros((ELITE_SET_SIZE,calculatePolicySize())) scoresTemp=np.copy(scores) for i in range(ELITE_SET_SIZE): index = np.argmax(scoresTemp) scoresTemp[index] = 0 eliteSet[i] = population[index] return eliteSet def cross(policy1, policy2): newPolicy = policy1.copy() mask = np.random.randint(2, size=newPolicy.shape).astype(np.bool) newPolicy[mask] = policy2[mask] # for i in range(calculatePolicySize()): # rand = np.random.uniform() # if rand > 0.5: # newPolicy[i] = policy2[i] return newPolicy # This is where crossover occurs based on the selection process def crossover(scores, population): crossoverSet = np.zeros((POPULATION_SIZE,calculatePolicySize())) selectionProbability = np.array(scores)/np.sum(scores) for i in range(POPULATION_SIZE - ELITE_SET_SIZE): randomIndex = np.random.choice(range(POPULATION_SIZE), p=selectionProbability) policy1 = population[randomIndex] randomIndex = np.random.choice(range(POPULATION_SIZE), p=selectionProbability) policy2 = population[randomIndex] newPolicy = cross(policy1, policy2) crossoverSet[i]=newPolicy return crossoverSet # Lastly, the mutation is a point mutation that sometimes occurs def mutation(crossoverPopulation): i = int((POPULATION_SIZE - ELITE_SET_SIZE) * np.random.random_sample()) j = int(calculatePolicySize() * np.random.random_sample()) for _ in 
range(int(i*j*MUTATION_RATE)): crossoverPopulation[i][j] = np.random.random_sample() * 2 - 1 # for i in range(POPULATION_SIZE - ELITE_SET_SIZE): # for j in range(calculatePolicySize()): # rand = np.random.uniform() # if(rand < MUTATION_RATE): # crossoverPopulation[i][j] = np.random.random_sample() * 2 - 1 return crossoverPopulation def generateNewGeneration(scores, population): elitePopulation = selection(scores, population) crossoverPopulation = crossover(scores, population) mutationPopulation = mutation(crossoverPopulation) for i in range(ELITE_SET_SIZE): mutationPopulation[POPULATION_SIZE-ELITE_SET_SIZE+i] = elitePopulation[i] return mutationPopulation def saveHighestScorePolicy(population, generation, scores): if (generation % 10 == 0): index = np.argmax(scores) filename='generation'+str(generation)+'HS'+str(scores[index])+'.npy' np.save(os.path.join('SavedScores', filename) ,population[index]) print("Saved generation to file "+filename) def loadPolicy(filename, population, index): policy=np.load(filename) print("Loaded\n",policy) population[index]=policy def measureTime(): global lasttime currentTime=time.time() diff=currentTime-lasttime lasttime=currentTime return diff # test_selection() # quit() env.reset() population = initPopulation() # loadPolicy('generation0.npy',population,0) dnnmodel = buildModel() generation = 0 lasttime = time.time() all_time_high_score = 0 writeCsv(4, time.time()) while (keepTraining): scores = evaluate(dnnmodel, population, generation*POPULATION_SIZE) print(int(measureTime())," sec Generation: ", generation, " Highest Score: ", np.max(scores), " Games Played: ", generation*POPULATION_SIZE+POPULATION_SIZE) writeCsv(0, generation) writeCsv(1, np.max(scores)) if (np.max(scores) > all_time_high_score): all_time_high_score = np.max(scores) writeCsv(5, all_time_high_score) saveHighestScorePolicy(population, generation, scores) population = generateNewGeneration(scores, population) print(int(measureTime())," sec New generation created.") generation+=1
avg_line_length: 33.744186
max_line_length: 190
alphanum_fraction: 0.686915
content_no_comment:
import gym import keras as k from keras.models import Sequential from keras.layers import Conv2D, Activation, MaxPooling2D, Flatten, Dense, Dropout from keras.optimizers import Adam import numpy as np from datetime import datetime from matplotlib import pyplot as PLT import time import csv import os POPULATION_SIZE = 50 L1=20 L2=10 L3=50 L4=4 POOLING_SIZE = (2,2) FILTER_SIZE_1 = (3,3) FILTER_SIZE_2 = (5,5) ELITE_SET_SIZE = 5 MUTATION_RATE = 0.5 FRAME_SIZE = 210*160*1 INPUT_DIM = 2*FRAME_SIZE INPUT_SHAPE = (210, 160, 2) FINAL_DIMENSION_X = int(((INPUT_SHAPE[0] - 2*int(FILTER_SIZE_1[0]/2))/2 - 2*int(FILTER_SIZE_2[0]/2))/2) FINAL_DIMENSION_Y = int(((INPUT_SHAPE[1] - 2*int(FILTER_SIZE_1[0]/2))/2 - 2*int(FILTER_SIZE_2[0]/2))/2) env = gym.make('SpaceInvaders-v0') keepTraining = True slack_logs = np.zeros((6,1)) def visualize(featureVector): regularImage = featureVector[0,:FRAME_SIZE].reshape((210,160)) differenceImage = featureVector[0,FRAME_SIZE:].reshape((210,160)) PLT.imshow(regularImage) PLT.show() PLT.imshow(differenceImage) PLT.show() def writeCsv(index, data): slack_logs[index] = data with open("logs.csv", "w", newline='') as csv_file: writer = csv.writer(csv_file, delimiter=',') writer.writerows(slack_logs) def calculatePolicySize(): return FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 + L1 + FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 + L2 + FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3 + L3 + L3 * L4 + L4 def initPopulation(): population = np.random.rand(POPULATION_SIZE, calculatePolicySize()) population = population*2-1 return population def convert_prediction_to_action(prediction): index = np.argmax(prediction[0]) if (index == 0): return 0 elif (index == 1): return 1 elif (index == 2): return 3 elif (index == 3): return 4 return 0 def playGame(model): score=0 done=False action=0 frame = np.zeros((1,FRAME_SIZE)) previous_frame = np.zeros((1,FRAME_SIZE)) env.reset() observation_dim = list(INPUT_SHAPE) observation_dim.insert(0,1) observation_dim = tuple(observation_dim) while not done: env.render() observation, reward, done, _ = env.step(action) frame = np.reshape(observation[:,:,0],(1,FRAME_SIZE)) frame = np.where(frame > 0, 1.0,0) difference = frame-previous_frame final_observation=np.zeros((1,INPUT_DIM)) final_observation[0,:FRAME_SIZE]=frame final_observation[0,FRAME_SIZE:]=difference final_observation = np.reshape(final_observation, observation_dim) prediction = model.predict(final_observation) action = convert_prediction_to_action(prediction) score+=reward writeCsv(2, score) previous_frame = np.copy(frame) return score def evaluate(dnnmodel, population, gamesPlayed): scores=np.zeros(POPULATION_SIZE) for i in range(POPULATION_SIZE): nnFormatPolicyVector = applyPolicyVectorToNN(population[i]) dnnmodel.set_weights(nnFormatPolicyVector) scores[i] = playGame(dnnmodel) gamesPlayed+=1 writeCsv(3, gamesPlayed) return scores def buildModel(): model = Sequential() layer1=Conv2D(L1, FILTER_SIZE_1, activation='relu', input_shape = INPUT_SHAPE, kernel_initializer='uniform') model.add(layer1) model.add(MaxPooling2D(pool_size=POOLING_SIZE)) layer2=Conv2D(L2, FILTER_SIZE_2, activation='relu', kernel_initializer='uniform') model.add(layer2) model.add(MaxPooling2D(pool_size=POOLING_SIZE)) model.add(Flatten()) layer3=Dense(L3, activation = 'relu', kernel_initializer='uniform') model.add(layer3) layer4=Dense(L4, activation ='softmax', kernel_initializer='uniform') model.add(layer4) adam = Adam(lr=0.01) model.compile(loss='mean_squared_error', optimizer=adam) weights=model.get_weights() 
print(len(weights)) print("====================================") return model def applyPolicyVectorToNN(policyVector): offset=FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * INPUT_SHAPE[2] * L1 sec1 = policyVector[:offset].reshape(FILTER_SIZE_1[0], FILTER_SIZE_1[1], INPUT_SHAPE[2], L1) sec2 = policyVector[offset:offset+L1] offset+=L1 sec3 = policyVector[offset:offset+FILTER_SIZE_2[0] * FILTER_SIZE_2[1] * L1 * L2].reshape(FILTER_SIZE_2[0], FILTER_SIZE_2[1], L1, L2) offset+=FILTER_SIZE_1[0] * FILTER_SIZE_1[1] * L1 * L2 sec4 = policyVector[offset:offset+L2] offset+=L2 sec5 = policyVector[offset:offset+FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3].reshape(FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2, L3) offset+=FINAL_DIMENSION_X*FINAL_DIMENSION_Y*L2*L3 sec6 = policyVector[offset:offset+L3] offset+=L3 sec7 = policyVector[offset:offset+L3*L4].reshape(L3, L4) offset+=L3*L4 sec8 = policyVector[offset:] nnFormat = [] nnFormat.append(sec1) nnFormat.append(sec2) nnFormat.append(sec3) nnFormat.append(sec4) nnFormat.append(sec5) nnFormat.append(sec6) nnFormat.append(sec7) nnFormat.append(sec8) return nnFormat def selection(scores, population): eliteSet = np.zeros((ELITE_SET_SIZE,calculatePolicySize())) scoresTemp=np.copy(scores) for i in range(ELITE_SET_SIZE): index = np.argmax(scoresTemp) scoresTemp[index] = 0 eliteSet[i] = population[index] return eliteSet def cross(policy1, policy2): newPolicy = policy1.copy() mask = np.random.randint(2, size=newPolicy.shape).astype(np.bool) newPolicy[mask] = policy2[mask] return newPolicy def crossover(scores, population): crossoverSet = np.zeros((POPULATION_SIZE,calculatePolicySize())) selectionProbability = np.array(scores)/np.sum(scores) for i in range(POPULATION_SIZE - ELITE_SET_SIZE): randomIndex = np.random.choice(range(POPULATION_SIZE), p=selectionProbability) policy1 = population[randomIndex] randomIndex = np.random.choice(range(POPULATION_SIZE), p=selectionProbability) policy2 = population[randomIndex] newPolicy = cross(policy1, policy2) crossoverSet[i]=newPolicy return crossoverSet def mutation(crossoverPopulation): i = int((POPULATION_SIZE - ELITE_SET_SIZE) * np.random.random_sample()) j = int(calculatePolicySize() * np.random.random_sample()) for _ in range(int(i*j*MUTATION_RATE)): crossoverPopulation[i][j] = np.random.random_sample() * 2 - 1 return crossoverPopulation def generateNewGeneration(scores, population): elitePopulation = selection(scores, population) crossoverPopulation = crossover(scores, population) mutationPopulation = mutation(crossoverPopulation) for i in range(ELITE_SET_SIZE): mutationPopulation[POPULATION_SIZE-ELITE_SET_SIZE+i] = elitePopulation[i] return mutationPopulation def saveHighestScorePolicy(population, generation, scores): if (generation % 10 == 0): index = np.argmax(scores) filename='generation'+str(generation)+'HS'+str(scores[index])+'.npy' np.save(os.path.join('SavedScores', filename) ,population[index]) print("Saved generation to file "+filename) def loadPolicy(filename, population, index): policy=np.load(filename) print("Loaded\n",policy) population[index]=policy def measureTime(): global lasttime currentTime=time.time() diff=currentTime-lasttime lasttime=currentTime return diff env.reset() population = initPopulation() dnnmodel = buildModel() generation = 0 lasttime = time.time() all_time_high_score = 0 writeCsv(4, time.time()) while (keepTraining): scores = evaluate(dnnmodel, population, generation*POPULATION_SIZE) print(int(measureTime())," sec Generation: ", generation, " Highest Score: ", np.max(scores), " Games Played: ", 
generation*POPULATION_SIZE+POPULATION_SIZE) writeCsv(0, generation) writeCsv(1, np.max(scores)) if (np.max(scores) > all_time_high_score): all_time_high_score = np.max(scores) writeCsv(5, all_time_high_score) saveHighestScorePolicy(population, generation, scores) population = generateNewGeneration(scores, population) print(int(measureTime())," sec New generation created.") generation+=1
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f70109e1cbe5617c07cf5ee77cb41dc2d8d444c4
size: 4,657
ext: py
lang: Python
max_stars_repo_path: ssd/modeling/backbone/vgg.py
max_stars_repo_name: BeibinLi/SSD
max_stars_repo_head_hexsha: 2cd30f02c21b0a8731a34dca2a89d6e099ca3442
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: ssd/modeling/backbone/vgg.py
max_issues_repo_name: BeibinLi/SSD
max_issues_repo_head_hexsha: 2cd30f02c21b0a8731a34dca2a89d6e099ca3442
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: ssd/modeling/backbone/vgg.py
max_forks_repo_name: BeibinLi/SSD
max_forks_repo_head_hexsha: 2cd30f02c21b0a8731a34dca2a89d6e099ca3442
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import torch.nn as nn import torch.nn.functional as F from ssd.layers import L2Norm from ssd.modeling import registry from ssd.utils.model_zoo import load_state_dict_from_url model_urls = { 'vgg': 'https://s3.amazonaws.com/amdegroot-models/vgg16_reducedfc.pth', } # borrowed from https://github.com/amdegroot/ssd.pytorch/blob/master/ssd.py def add_vgg(cfg, batch_norm=False): layers = [] in_channels = 3 for v in cfg: if v == 'M': layers += [nn.MaxPool2d(kernel_size=2, stride=2)] elif v == 'C': layers += [nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True)] else: conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) if batch_norm: layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] else: layers += [conv2d, nn.ReLU(inplace=True)] in_channels = v pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1) conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6) conv7 = nn.Conv2d(1024, 1024, kernel_size=1) layers += [pool5, conv6, nn.ReLU(inplace=True), conv7, nn.ReLU(inplace=True)] return layers def add_extras(cfg, i, size=300): # Extra layers added to VGG for feature scaling layers = [] in_channels = i flag = False for k, v in enumerate(cfg): if in_channels != 'S': if v == 'S': layers += [nn.Conv2d(in_channels, cfg[k + 1], kernel_size=(1, 3)[flag], stride=2, padding=1)] else: layers += [nn.Conv2d(in_channels, v, kernel_size=(1, 3)[flag])] flag = not flag in_channels = v if size == 512: layers.append(nn.Conv2d(in_channels, 128, kernel_size=1, stride=1)) layers.append(nn.Conv2d(128, 256, kernel_size=4, stride=1, padding=1)) return layers def add_header(vgg, extra_layers, boxes_per_location, num_classes): regression_headers = [] classification_headers = [] vgg_source = [21, -2] for k, v in enumerate(vgg_source): regression_headers += [nn.Conv2d(vgg[v].out_channels, boxes_per_location[k] * 4, kernel_size=3, padding=1)] classification_headers += [nn.Conv2d(vgg[v].out_channels, boxes_per_location[k] * num_classes, kernel_size=3, padding=1)] for k, v in enumerate(extra_layers[1::2], 2): regression_headers += [nn.Conv2d(v.out_channels, boxes_per_location[k] * 4, kernel_size=3, padding=1)] classification_headers += [nn.Conv2d(v.out_channels, boxes_per_location[k] * num_classes, kernel_size=3, padding=1)] return regression_headers, classification_headers vgg_base = { '300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M', 512, 512, 512], '512': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M', 512, 512, 512], } extras_base = { '300': [256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256], '512': [256, 'S', 512, 128, 'S', 256, 128, 'S', 256, 128, 'S', 256], } class VGG(nn.Module): def __init__(self, cfg): super().__init__() size = cfg.INPUT.IMAGE_SIZE vgg_config = vgg_base[str(size)] extras_config = extras_base[str(size)] self.vgg = nn.ModuleList(add_vgg(vgg_config)) self.extras = nn.ModuleList(add_extras(extras_config, i=1024, size=size)) self.l2_norm = L2Norm(512, scale=20) self.reset_parameters() def reset_parameters(self): for m in self.extras.modules(): if isinstance(m, nn.Conv2d): nn.init.xavier_uniform_(m.weight) nn.init.zeros_(m.bias) def init_from_pretrain(self, state_dict): self.vgg.load_state_dict(state_dict) def forward(self, x): features = [] for i in range(23): x = self.vgg[i](x) s = self.l2_norm(x) # Conv4_3 L2 normalization features.append(s) # apply vgg up to fc7 for i in range(23, len(self.vgg)): x = self.vgg[i](x) features.append(x) for k, v in enumerate(self.extras): x = F.relu(v(x), inplace=True) if k % 2 == 1: 
features.append(x) return tuple(features) @registry.BACKBONES.register('vgg') def vgg(cfg, pretrained=True): model = VGG(cfg) if pretrained: model.init_from_pretrain(load_state_dict_from_url(model_urls['vgg'])) return model
avg_line_length: 35.280303
max_line_length: 109
alphanum_fraction: 0.582564
content_no_comment:
import torch.nn as nn import torch.nn.functional as F from ssd.layers import L2Norm from ssd.modeling import registry from ssd.utils.model_zoo import load_state_dict_from_url model_urls = { 'vgg': 'https://s3.amazonaws.com/amdegroot-models/vgg16_reducedfc.pth', } def add_vgg(cfg, batch_norm=False): layers = [] in_channels = 3 for v in cfg: if v == 'M': layers += [nn.MaxPool2d(kernel_size=2, stride=2)] elif v == 'C': layers += [nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True)] else: conv2d = nn.Conv2d(in_channels, v, kernel_size=3, padding=1) if batch_norm: layers += [conv2d, nn.BatchNorm2d(v), nn.ReLU(inplace=True)] else: layers += [conv2d, nn.ReLU(inplace=True)] in_channels = v pool5 = nn.MaxPool2d(kernel_size=3, stride=1, padding=1) conv6 = nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6) conv7 = nn.Conv2d(1024, 1024, kernel_size=1) layers += [pool5, conv6, nn.ReLU(inplace=True), conv7, nn.ReLU(inplace=True)] return layers def add_extras(cfg, i, size=300): layers = [] in_channels = i flag = False for k, v in enumerate(cfg): if in_channels != 'S': if v == 'S': layers += [nn.Conv2d(in_channels, cfg[k + 1], kernel_size=(1, 3)[flag], stride=2, padding=1)] else: layers += [nn.Conv2d(in_channels, v, kernel_size=(1, 3)[flag])] flag = not flag in_channels = v if size == 512: layers.append(nn.Conv2d(in_channels, 128, kernel_size=1, stride=1)) layers.append(nn.Conv2d(128, 256, kernel_size=4, stride=1, padding=1)) return layers def add_header(vgg, extra_layers, boxes_per_location, num_classes): regression_headers = [] classification_headers = [] vgg_source = [21, -2] for k, v in enumerate(vgg_source): regression_headers += [nn.Conv2d(vgg[v].out_channels, boxes_per_location[k] * 4, kernel_size=3, padding=1)] classification_headers += [nn.Conv2d(vgg[v].out_channels, boxes_per_location[k] * num_classes, kernel_size=3, padding=1)] for k, v in enumerate(extra_layers[1::2], 2): regression_headers += [nn.Conv2d(v.out_channels, boxes_per_location[k] * 4, kernel_size=3, padding=1)] classification_headers += [nn.Conv2d(v.out_channels, boxes_per_location[k] * num_classes, kernel_size=3, padding=1)] return regression_headers, classification_headers vgg_base = { '300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M', 512, 512, 512], '512': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M', 512, 512, 512], } extras_base = { '300': [256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256], '512': [256, 'S', 512, 128, 'S', 256, 128, 'S', 256, 128, 'S', 256], } class VGG(nn.Module): def __init__(self, cfg): super().__init__() size = cfg.INPUT.IMAGE_SIZE vgg_config = vgg_base[str(size)] extras_config = extras_base[str(size)] self.vgg = nn.ModuleList(add_vgg(vgg_config)) self.extras = nn.ModuleList(add_extras(extras_config, i=1024, size=size)) self.l2_norm = L2Norm(512, scale=20) self.reset_parameters() def reset_parameters(self): for m in self.extras.modules(): if isinstance(m, nn.Conv2d): nn.init.xavier_uniform_(m.weight) nn.init.zeros_(m.bias) def init_from_pretrain(self, state_dict): self.vgg.load_state_dict(state_dict) def forward(self, x): features = [] for i in range(23): x = self.vgg[i](x) s = self.l2_norm(x) features.append(s) for i in range(23, len(self.vgg)): x = self.vgg[i](x) features.append(x) for k, v in enumerate(self.extras): x = F.relu(v(x), inplace=True) if k % 2 == 1: features.append(x) return tuple(features) @registry.BACKBONES.register('vgg') def vgg(cfg, pretrained=True): model = VGG(cfg) if pretrained: 
model.init_from_pretrain(load_state_dict_from_url(model_urls['vgg'])) return model
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f7010a582d05fcd97300b6f85b1ecb9b1d89f6e1
size: 516
ext: py
lang: Python
max_stars_repo_path: sfa/custom_function.py
max_stars_repo_name: erpsonic/en_sfa
max_stars_repo_head_hexsha: 2b5f52ec063571c30e147610ceb4d8baa76121e4
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: sfa/custom_function.py
max_issues_repo_name: erpsonic/en_sfa
max_issues_repo_head_hexsha: 2b5f52ec063571c30e147610ceb4d8baa76121e4
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: sfa/custom_function.py
max_forks_repo_name: erpsonic/en_sfa
max_forks_repo_head_hexsha: 2b5f52ec063571c30e147610ceb4d8baa76121e4
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
from __future__ import unicode_literals import frappe def set_default_address(doc,method): if doc.is_primary_address: for row in doc.links: if row.link_doctype=="Customer": cust = frappe.get_doc("Customer",row.link_name) cust.default_address=doc.name cust.save() def set_default_contact(doc,method): if doc.is_primary_contact: for row in doc.links: if row.link_doctype=="Customer": cust = frappe.get_doc("Customer",row.link_name) cust.default_contact_person=doc.name cust.save()
avg_line_length: 28.666667
max_line_length: 51
alphanum_fraction: 0.75
content_no_comment:
from __future__ import unicode_literals import frappe def set_default_address(doc,method): if doc.is_primary_address: for row in doc.links: if row.link_doctype=="Customer": cust = frappe.get_doc("Customer",row.link_name) cust.default_address=doc.name cust.save() def set_default_contact(doc,method): if doc.is_primary_contact: for row in doc.links: if row.link_doctype=="Customer": cust = frappe.get_doc("Customer",row.link_name) cust.default_contact_person=doc.name cust.save()
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f7010a60f3944ab782eeb8876e4d862fb8afe60d
size: 1,026
ext: py
lang: Python
max_stars_repo_path: src/core/Nav/Nav.py
max_stars_repo_name: airportmarc/bondy
max_stars_repo_head_hexsha: de574b69d33576359b089471406f6447cd4a87df
max_stars_repo_licenses: [ "MIT" ]
max_stars_count: null
max_stars_repo_stars_event_min_datetime: null
max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: src/core/Nav/Nav.py
max_issues_repo_name: airportmarc/bondy
max_issues_repo_head_hexsha: de574b69d33576359b089471406f6447cd4a87df
max_issues_repo_licenses: [ "MIT" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: src/core/Nav/Nav.py
max_forks_repo_name: airportmarc/bondy
max_forks_repo_head_hexsha: de574b69d33576359b089471406f6447cd4a87df
max_forks_repo_licenses: [ "MIT" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
class MenuItem(object): TEXT_NAME = 'name' TEXT_URL = 'url_name' TEXT_SUBMENU = 'submenu' def __init__(self, name, url=None, *args): super(MenuItem, self).__init__() self.name = name self.url = url self.url_args = args self.sub_menu = [] def add_sub_menu_item(self, name, url): item = {self.TEXT_NAME: name, self.TEXT_URL: url} self.sub_menu.append(item) def __getitem__(self, key): return self[key] def to_text(self): output = {} output[self.TEXT_NAME] = self.name if self.url: output[self.TEXT_URL] = self.url if self.sub_menu: output[self.TEXT_SUBMENU] = self.sub_menu return output class Nav: def __init__(self, *args, **kwargs): self.menu = [] def add_menu(self, menu): self.menu.append(menu) def get_menu_list(self): output = [] for x in self.menu: output.append(x.to_text()) return output
avg_line_length: 23.318182
max_line_length: 57
alphanum_fraction: 0.57115
content_no_comment:
class MenuItem(object): TEXT_NAME = 'name' TEXT_URL = 'url_name' TEXT_SUBMENU = 'submenu' def __init__(self, name, url=None, *args): super(MenuItem, self).__init__() self.name = name self.url = url self.url_args = args self.sub_menu = [] def add_sub_menu_item(self, name, url): item = {self.TEXT_NAME: name, self.TEXT_URL: url} self.sub_menu.append(item) def __getitem__(self, key): return self[key] def to_text(self): output = {} output[self.TEXT_NAME] = self.name if self.url: output[self.TEXT_URL] = self.url if self.sub_menu: output[self.TEXT_SUBMENU] = self.sub_menu return output class Nav: def __init__(self, *args, **kwargs): self.menu = [] def add_menu(self, menu): self.menu.append(menu) def get_menu_list(self): output = [] for x in self.menu: output.append(x.to_text()) return output
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f7010ac3c7a794ae315e8b6b29a246ee58f835ee
size: 1,907
ext: py
lang: Python
max_stars_repo_path: oiasg_base/lib/game.py
max_stars_repo_name: will7101/OIASG
max_stars_repo_head_hexsha: 44badff57689da99a2c9896d176b32e7b51d42b5
max_stars_repo_licenses: [ "BSD-3-Clause" ]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2018-03-17T10:07:11.000Z
max_stars_repo_stars_event_max_datetime: 2018-03-17T10:07:11.000Z
max_issues_repo_path: oiasg_base/lib/game.py
max_issues_repo_name: will7101/OIASG
max_issues_repo_head_hexsha: 44badff57689da99a2c9896d176b32e7b51d42b5
max_issues_repo_licenses: [ "BSD-3-Clause" ]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2018-03-17T11:35:54.000Z
max_issues_repo_issues_event_max_datetime: 2018-03-17T11:35:54.000Z
max_forks_repo_path: oiasg_base/lib/game.py
max_forks_repo_name: will7101/OIASG
max_forks_repo_head_hexsha: 44badff57689da99a2c9896d176b32e7b51d42b5
max_forks_repo_licenses: [ "BSD-3-Clause" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import os import time import traceback # import functools def getobj(s): return open(s, "r", encoding='utf-8').read() def getobjs(s): objs = [] fs = os.listdir(s) for f in fs: absf = os.path.join(s, f) if os.path.isfile(absf) and os.path.splitext(f)[1] == '.py': objs.append(absf) elif os.path.isdir(absf): objs += getobjs(absf) return objs class gameplay(object): def __init__(self, scenario="__general", _basedir=None): print("A new game object is constructed.") if _basedir is None: _basedir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.__basedir = _basedir self.var = { "load_script": self.load_script, "load_scripts": self.load_scripts, "running": True # "output":self.output } self.load_scripts("__commons") self.load_script(os.path.join("scenarios", scenario + '.py')) self.paused = True self.speed = 0 def end(self): print("A game has ended.") def run(self): print("A game has started.") try: # definition # execution while self.var['running']: self.var['play_round']() self.pause_game() while self.paused: # self.output() ope = input() # print("Game object got operation:" + ope) exec(ope) time.sleep(2 * (0.5 ** self.speed)) except: print("!!!!! --- 游戏体抛出异常 --- !!!!!") traceback.print_exc() self.end() def output(self): print(self.var) def load_script(self, scriptpath): exec(getobj(os.path.join(self.__basedir, scriptpath)), self.var, self.var) def load_scripts(self, scriptdir): objs = getobjs(os.path.join(self.__basedir, scriptdir)) objs.sort() for i in objs: exec(getobj(i), self.var, self.var) def pause_game(self): self.paused = True def continue_game(self): self.paused = False def set_speed(self, speed): self.speed = speed
avg_line_length: 22.435294
max_line_length: 77
alphanum_fraction: 0.625066
content_no_comment:
import os import time import traceback def getobj(s): return open(s, "r", encoding='utf-8').read() def getobjs(s): objs = [] fs = os.listdir(s) for f in fs: absf = os.path.join(s, f) if os.path.isfile(absf) and os.path.splitext(f)[1] == '.py': objs.append(absf) elif os.path.isdir(absf): objs += getobjs(absf) return objs class gameplay(object): def __init__(self, scenario="__general", _basedir=None): print("A new game object is constructed.") if _basedir is None: _basedir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) self.__basedir = _basedir self.var = { "load_script": self.load_script, "load_scripts": self.load_scripts, "running": True } self.load_scripts("__commons") self.load_script(os.path.join("scenarios", scenario + '.py')) self.paused = True self.speed = 0 def end(self): print("A game has ended.") def run(self): print("A game has started.") try: while self.var['running']: self.var['play_round']() self.pause_game() while self.paused: ope = input() exec(ope) time.sleep(2 * (0.5 ** self.speed)) except: print("!!!!! --- 游戏体抛出异常 --- !!!!!") traceback.print_exc() self.end() def output(self): print(self.var) def load_script(self, scriptpath): exec(getobj(os.path.join(self.__basedir, scriptpath)), self.var, self.var) def load_scripts(self, scriptdir): objs = getobjs(os.path.join(self.__basedir, scriptdir)) objs.sort() for i in objs: exec(getobj(i), self.var, self.var) def pause_game(self): self.paused = True def continue_game(self): self.paused = False def set_speed(self, speed): self.speed = speed
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f7010bd75a5a5416d81aa889093286f6e47a1c7d
size: 6,120
ext: py
lang: Python
max_stars_repo_path: songbird/util.py
max_stars_repo_name: fedarko/songbird
max_stars_repo_head_hexsha: 44827596bc9ca16d8046aeafee24ee1dd74dcc0b
max_stars_repo_licenses: [ "BSD-2-Clause", "BSD-3-Clause" ]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2020-08-03T11:23:29.000Z
max_stars_repo_stars_event_max_datetime: 2020-08-03T11:23:29.000Z
max_issues_repo_path: songbird/util.py
max_issues_repo_name: fedarko/songbird
max_issues_repo_head_hexsha: 44827596bc9ca16d8046aeafee24ee1dd74dcc0b
max_issues_repo_licenses: [ "BSD-2-Clause", "BSD-3-Clause" ]
max_issues_count: 1
max_issues_repo_issues_event_min_datetime: 2019-09-23T20:15:24.000Z
max_issues_repo_issues_event_max_datetime: 2019-09-23T20:42:15.000Z
max_forks_repo_path: songbird/util.py
max_forks_repo_name: fedarko/songbird
max_forks_repo_head_hexsha: 44827596bc9ca16d8046aeafee24ee1dd74dcc0b
max_forks_repo_licenses: [ "BSD-2-Clause", "BSD-3-Clause" ]
max_forks_count: null
max_forks_repo_forks_event_min_datetime: null
max_forks_repo_forks_event_max_datetime: null
content:
import os import tensorflow as tf import numpy as np import pandas as pd from sklearn.utils import check_random_state from skbio.stats.composition import clr_inv as softmax from biom import Table from patsy import dmatrix def random_multinomial_model(num_samples, num_features, reps=1, low=2, high=10, beta_mean=0, beta_scale=5, mu=1, sigma=1, seed=0): """ Generates a table using a random poisson regression model. Here we will be simulating microbial counts given the model, and the corresponding model priors. Parameters ---------- num_samples : int Number of samples num_features : int Number of features tree : np.array Tree specifying orthonormal contrast matrix. low : float Smallest gradient value. high : float Largest gradient value. beta_mean : float Mean of beta prior (for regression coefficients) beta_scale : float Scale of beta prior (for regression coefficients) mu : float Mean sequencing depth (in log units) sigma : float Variance for sequencing depth Returns ------- table : biom.Table Biom representation of the count table. metadata : pd.DataFrame DataFrame containing relevant metadata. beta : np.array Regression parameter estimates. """ N = num_samples # generate all of the coefficient using the random poisson model state = check_random_state(seed) beta = state.normal(beta_mean, beta_scale, size=(2, num_features-1)) X = np.hstack([np.linspace(low, high, num_samples // reps)] for _ in range(reps)) X = np.vstack((np.ones(N), X)).T phi = np.hstack((np.zeros((N, 1)), X @ beta)) probs = softmax(phi) n = [mu] * N table = np.vstack( state.multinomial(n[i], probs[i, :]) for i in range(N) ).T samp_ids = pd.Index(['S%d' % i for i in range(num_samples)], name='sampleid') feat_ids = ['F%d' % i for i in range(num_features)] balance_ids = ['L%d' % i for i in range(num_features-1)] table = Table(table, feat_ids, samp_ids) metadata = pd.DataFrame(X, columns=['Ones', 'X'], index=samp_ids) beta = pd.DataFrame(beta.T, columns=['Intercept', 'beta'], index=balance_ids) return table, metadata, beta def _type_cast_to_float(df): """ Attempt to cast all of the values in dataframe to float. This will try to type cast all of the series within the dataframe into floats. If a column cannot be type casted, it will be kept as is. Parameters ---------- df : pd.DataFrame Returns ------- pd.DataFrame """ # TODO: Will need to improve this, as this is a very hacky solution. for c in df.columns: s = df[c] try: df[c] = s.astype(np.float64) except Exception: continue return df def read_metadata(filepath): """ Reads in a sample metadata file Parameters ---------- filepath: str The file path location of the sample metadata file Returns ------- pd.DataFrame : The metadata table with inferred types. """ metadata = pd.read_table( filepath, dtype=object) cols = metadata.columns metadata = metadata.set_index(cols[0]) metadata = _type_cast_to_float(metadata.copy()) return metadata def match_and_filter(table, metadata, formula, min_sample_count, min_feature_count): """ Matches and aligns biom and metadata tables. This will also return the patsy representation. 
Parameters ---------- table : biom.Table Table of abundances metadata : pd.DataFrame Sample metadata Returns ------- table : biom.Table Filtered biom table metadata : pd.DataFrame Sample metadata """ # match them def sample_filter(val, id_, md): return id_ in metadata.index and np.sum(val) > min_sample_count def read_filter(val, id_, md): return np.sum(val > 0) > min_feature_count table = table.filter(sample_filter, axis='sample', inplace=False) table = table.filter(read_filter, axis='observation', inplace=False) metadata = metadata.loc[table.ids(axis='sample')] metadata = metadata.loc[~metadata.index.duplicated(keep='first')] def sort_f(xs): return [xs[metadata.index.get_loc(x)] for x in xs] table = table.sort(sort_f=sort_f, axis='sample') design = dmatrix(formula, metadata, return_type='dataframe') design = design.dropna() def design_filter(val, id_, md): return id_ in design.index table = table.filter(design_filter, axis='sample') return table, metadata, design def split_training(dense_table, metadata, design, training_column=None, num_random_test_examples=10, seed=None): if training_column is None: np.random.seed(seed) idx = np.random.random(design.shape[0]) i = np.argsort(idx)[num_random_test_examples] threshold = idx[i] train_idx = ~(idx < threshold) else: train_idx = metadata.loc[design.index, training_column] == "Train" trainX = design.loc[train_idx].values testX = design.loc[~train_idx].values trainY = dense_table.loc[train_idx].values testY = dense_table.loc[~train_idx].values return trainX, testX, trainY, testY def silence_output(): # suppress profiling messages & compilation warnings # taken from: # https://stackoverflow.com/questions/47068709/your-cpu-supports- # instructions-that-this-tensorflow-binary-was-not-compiled-to-u os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' # suppress deprecation warnings # taken from https://github.com/tensorflow/tensorflow/issues/27023 tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
avg_line_length: 28.867925
max_line_length: 74
alphanum_fraction: 0.625327
content_no_comment:
import os import tensorflow as tf import numpy as np import pandas as pd from sklearn.utils import check_random_state from skbio.stats.composition import clr_inv as softmax from biom import Table from patsy import dmatrix def random_multinomial_model(num_samples, num_features, reps=1, low=2, high=10, beta_mean=0, beta_scale=5, mu=1, sigma=1, seed=0): N = num_samples state = check_random_state(seed) beta = state.normal(beta_mean, beta_scale, size=(2, num_features-1)) X = np.hstack([np.linspace(low, high, num_samples // reps)] for _ in range(reps)) X = np.vstack((np.ones(N), X)).T phi = np.hstack((np.zeros((N, 1)), X @ beta)) probs = softmax(phi) n = [mu] * N table = np.vstack( state.multinomial(n[i], probs[i, :]) for i in range(N) ).T samp_ids = pd.Index(['S%d' % i for i in range(num_samples)], name='sampleid') feat_ids = ['F%d' % i for i in range(num_features)] balance_ids = ['L%d' % i for i in range(num_features-1)] table = Table(table, feat_ids, samp_ids) metadata = pd.DataFrame(X, columns=['Ones', 'X'], index=samp_ids) beta = pd.DataFrame(beta.T, columns=['Intercept', 'beta'], index=balance_ids) return table, metadata, beta def _type_cast_to_float(df): for c in df.columns: s = df[c] try: df[c] = s.astype(np.float64) except Exception: continue return df def read_metadata(filepath): metadata = pd.read_table( filepath, dtype=object) cols = metadata.columns metadata = metadata.set_index(cols[0]) metadata = _type_cast_to_float(metadata.copy()) return metadata def match_and_filter(table, metadata, formula, min_sample_count, min_feature_count): def sample_filter(val, id_, md): return id_ in metadata.index and np.sum(val) > min_sample_count def read_filter(val, id_, md): return np.sum(val > 0) > min_feature_count table = table.filter(sample_filter, axis='sample', inplace=False) table = table.filter(read_filter, axis='observation', inplace=False) metadata = metadata.loc[table.ids(axis='sample')] metadata = metadata.loc[~metadata.index.duplicated(keep='first')] def sort_f(xs): return [xs[metadata.index.get_loc(x)] for x in xs] table = table.sort(sort_f=sort_f, axis='sample') design = dmatrix(formula, metadata, return_type='dataframe') design = design.dropna() def design_filter(val, id_, md): return id_ in design.index table = table.filter(design_filter, axis='sample') return table, metadata, design def split_training(dense_table, metadata, design, training_column=None, num_random_test_examples=10, seed=None): if training_column is None: np.random.seed(seed) idx = np.random.random(design.shape[0]) i = np.argsort(idx)[num_random_test_examples] threshold = idx[i] train_idx = ~(idx < threshold) else: train_idx = metadata.loc[design.index, training_column] == "Train" trainX = design.loc[train_idx].values testX = design.loc[~train_idx].values trainY = dense_table.loc[train_idx].values testY = dense_table.loc[~train_idx].values return trainX, testX, trainY, testY def silence_output(): os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
is_comment_constant_removed: true
is_sharp_comment_removed: true

hexsha: f7010e09db939d4517fbc2b1a250bb4262658b1d
size: 22,716
ext: py
lang: Python
max_stars_repo_path: letsencrypt/client.py
max_stars_repo_name: impressiver/letsencrypt
max_stars_repo_head_hexsha: f2a83e9e942e9f5776a2e9f8c76d7a21bbbf27aa
max_stars_repo_licenses: [ "Apache-2.0" ]
max_stars_count: 1
max_stars_repo_stars_event_min_datetime: 2017-02-19T12:38:23.000Z
max_stars_repo_stars_event_max_datetime: 2017-02-19T12:38:23.000Z
max_issues_repo_path: letsencrypt/client.py
max_issues_repo_name: ingochris/letsencrypt
max_issues_repo_head_hexsha: f2a83e9e942e9f5776a2e9f8c76d7a21bbbf27aa
max_issues_repo_licenses: [ "Apache-2.0" ]
max_issues_count: null
max_issues_repo_issues_event_min_datetime: null
max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: letsencrypt/client.py
max_forks_repo_name: ingochris/letsencrypt
max_forks_repo_head_hexsha: f2a83e9e942e9f5776a2e9f8c76d7a21bbbf27aa
max_forks_repo_licenses: [ "Apache-2.0" ]
max_forks_count: 1
max_forks_repo_forks_event_min_datetime: 2015-12-03T23:58:41.000Z
max_forks_repo_forks_event_max_datetime: 2015-12-03T23:58:41.000Z
content:
"""Let's Encrypt client API.""" import logging import os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa import OpenSSL import zope.component from acme import client as acme_client from acme import jose from acme import messages import letsencrypt from letsencrypt import account from letsencrypt import auth_handler from letsencrypt import configuration from letsencrypt import constants from letsencrypt import continuity_auth from letsencrypt import crypto_util from letsencrypt import errors from letsencrypt import error_handler from letsencrypt import interfaces from letsencrypt import le_util from letsencrypt import reverter from letsencrypt import storage from letsencrypt.display import ops as display_ops from letsencrypt.display import enhancements logger = logging.getLogger(__name__) def acme_from_config_key(config, key): "Wrangle ACME client construction" # TODO: Allow for other alg types besides RS256 net = acme_client.ClientNetwork(key, verify_ssl=(not config.no_verify_ssl), user_agent=_determine_user_agent(config)) return acme_client.Client(config.server, key=key, net=net) def _determine_user_agent(config): """ Set a user_agent string in the config based on the choice of plugins. (this wasn't knowable at construction time) :returns: the client's User-Agent string :rtype: `str` """ if config.user_agent is None: ua = "LetsEncryptPythonClient/{0} ({1}) Authenticator/{2} Installer/{3}" ua = ua.format(letsencrypt.__version__, " ".join(le_util.get_os_info()), config.authenticator, config.installer) else: ua = config.user_agent return ua def register(config, account_storage, tos_cb=None): """Register new account with an ACME CA. This function takes care of generating fresh private key, registering the account, optionally accepting CA Terms of Service and finally saving the account. It should be called prior to initialization of `Client`, unless account has already been created. :param .IConfig config: Client configuration. :param .AccountStorage account_storage: Account storage where newly registered account will be saved to. Save happens only after TOS acceptance step, so any account private keys or `.RegistrationResource` will not be persisted if `tos_cb` returns ``False``. :param tos_cb: If ACME CA requires the user to accept a Terms of Service before registering account, client action is necessary. For example, a CLI tool would prompt the user acceptance. `tos_cb` must be a callable that should accept `.RegistrationResource` and return a `bool`: ``True`` iff the Terms of Service present in the contained `.Registration.terms_of_service` is accepted by the client, and ``False`` otherwise. ``tos_cb`` will be called only if the client acction is necessary, i.e. when ``terms_of_service is not None``. This argument is optional, if not supplied it will default to automatic acceptance! :raises letsencrypt.errors.Error: In case of any client problems, in particular registration failure, or unaccepted Terms of Service. :raises acme.errors.Error: In case of any protocol problems. :returns: Newly registered and saved account, as well as protocol API handle (should be used in `Client` initialization). 
:rtype: `tuple` of `.Account` and `acme.client.Client` """ # Log non-standard actions, potentially wrong API calls if account_storage.find_all(): logger.info("There are already existing accounts for %s", config.server) if config.email is None: if not config.register_unsafely_without_email: msg = ("No email was provided and " "--register-unsafely-without-email was not present.") logger.warn(msg) raise errors.Error(msg) logger.warn("Registering without email!") # Each new registration shall use a fresh new key key = jose.JWKRSA(key=jose.ComparableRSAKey( rsa.generate_private_key( public_exponent=65537, key_size=config.rsa_key_size, backend=default_backend()))) acme = acme_from_config_key(config, key) # TODO: add phone? regr = perform_registration(acme, config) if regr.terms_of_service is not None: if tos_cb is not None and not tos_cb(regr): raise errors.Error( "Registration cannot proceed without accepting " "Terms of Service.") regr = acme.agree_to_tos(regr) acc = account.Account(regr, key) account.report_new_account(acc, config) account_storage.save(acc) return acc, acme def perform_registration(acme, config): """ Actually register new account, trying repeatedly if there are email problems :param .IConfig config: Client configuration. :param acme.client.Client client: ACME client object. :returns: Registration Resource. :rtype: `acme.messages.RegistrationResource` :raises .UnexpectedUpdate: """ try: return acme.register(messages.NewRegistration.from_data(email=config.email)) except messages.Error, e: err = repr(e) if "MX record" in err or "Validation of contact mailto" in err: config.namespace.email = display_ops.get_email(more=True, invalid=True) return perform_registration(acme, config) else: raise class Client(object): """ACME protocol client. :ivar .IConfig config: Client configuration. :ivar .Account account: Account registered with `register`. :ivar .AuthHandler auth_handler: Authorizations handler that will dispatch DV and Continuity challenges to appropriate authenticators (providing `.IAuthenticator` interface). :ivar .IAuthenticator dv_auth: Prepared (`.IAuthenticator.prepare`) authenticator that can solve the `.constants.DV_CHALLENGES`. :ivar .IInstaller installer: Installer. :ivar acme.client.Client acme: Optional ACME client API handle. You might already have one from `register`. """ def __init__(self, config, account_, dv_auth, installer, acme=None): """Initialize a client.""" self.config = config self.account = account_ self.dv_auth = dv_auth self.installer = installer # Initialize ACME if account is provided if acme is None and self.account is not None: acme = acme_from_config_key(config, self.account.key) self.acme = acme # TODO: Check if self.config.enroll_autorenew is None. If # so, set it based to the default: figure out if dv_auth is # standalone (then default is False, otherwise default is True) if dv_auth is not None: cont_auth = continuity_auth.ContinuityAuthenticator(config, installer) self.auth_handler = auth_handler.AuthHandler( dv_auth, cont_auth, self.acme, self.account) else: self.auth_handler = None def _obtain_certificate(self, domains, csr): """Obtain certificate. Internal function with precondition that `domains` are consistent with identifiers present in the `csr`. :param list domains: Domain names. :param .le_util.CSR csr: DER-encoded Certificate Signing Request. The key used to generate this CSR can be different than `authkey`. :returns: `.CertificateResource` and certificate chain (as returned by `.fetch_chain`). 
:rtype: tuple """ if self.auth_handler is None: msg = ("Unable to obtain certificate because authenticator is " "not set.") logger.warning(msg) raise errors.Error(msg) if self.account.regr is None: raise errors.Error("Please register with the ACME server first.") logger.debug("CSR: %s, domains: %s", csr, domains) authzr = self.auth_handler.get_authorizations(domains) certr = self.acme.request_issuance( jose.ComparableX509(OpenSSL.crypto.load_certificate_request( OpenSSL.crypto.FILETYPE_ASN1, csr.data)), authzr) return certr, self.acme.fetch_chain(certr) def obtain_certificate_from_csr(self, csr): """Obtain certficiate from CSR. :param .le_util.CSR csr: DER-encoded Certificate Signing Request. :returns: `.CertificateResource` and certificate chain (as returned by `.fetch_chain`). :rtype: tuple """ return self._obtain_certificate( # TODO: add CN to domains? crypto_util.get_sans_from_csr( csr.data, OpenSSL.crypto.FILETYPE_ASN1), csr) def obtain_certificate(self, domains): """Obtains a certificate from the ACME server. `.register` must be called before `.obtain_certificate` :param set domains: domains to get a certificate :returns: `.CertificateResource`, certificate chain (as returned by `.fetch_chain`), and newly generated private key (`.le_util.Key`) and DER-encoded Certificate Signing Request (`.le_util.CSR`). :rtype: tuple """ # Create CSR from names key = crypto_util.init_save_key( self.config.rsa_key_size, self.config.key_dir) csr = crypto_util.init_save_csr(key, domains, self.config.csr_dir) return self._obtain_certificate(domains, csr) + (key, csr) def obtain_and_enroll_certificate(self, domains): """Obtain and enroll certificate. Get a new certificate for the specified domains using the specified authenticator and installer, and then create a new renewable lineage containing it. :param list domains: Domains to request. :param plugins: A PluginsFactory object. :returns: A new :class:`letsencrypt.storage.RenewableCert` instance referred to the enrolled cert lineage, or False if the cert could not be obtained. """ certr, chain, key, _ = self.obtain_certificate(domains) # XXX: We clearly need a more general and correct way of getting # options into the configobj for the RenewableCert instance. # This is a quick-and-dirty way to do it to allow integration # testing to start. (Note that the config parameter to new_lineage # ideally should be a ConfigObj, but in this case a dict will be # accepted in practice.) params = vars(self.config.namespace) config = {} cli_config = configuration.RenewerConfiguration(self.config.namespace) if (cli_config.config_dir != constants.CLI_DEFAULTS["config_dir"] or cli_config.work_dir != constants.CLI_DEFAULTS["work_dir"]): logger.warning( "Non-standard path(s), might not work with crontab installed " "by your operating system package manager") lineage = storage.RenewableCert.new_lineage( domains[0], OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, certr.body), key.pem, crypto_util.dump_pyopenssl_chain(chain), params, config, cli_config) return lineage def save_certificate(self, certr, chain_cert, cert_path, chain_path, fullchain_path): """Saves the certificate received from the ACME server. :param certr: ACME "certificate" resource. :type certr: :class:`acme.messages.Certificate` :param list chain_cert: :param str cert_path: Candidate path to a certificate. :param str chain_path: Candidate path to a certificate chain. :param str fullchain_path: Candidate path to a full cert chain. 
:returns: cert_path, chain_path, and fullchain_path as absolute paths to the actual files :rtype: `tuple` of `str` :raises IOError: If unable to find room to write the cert files """ for path in cert_path, chain_path, fullchain_path: le_util.make_or_verify_dir( os.path.dirname(path), 0o755, os.geteuid(), self.config.strict_permissions) cert_pem = OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, certr.body) cert_file, act_cert_path = le_util.unique_file(cert_path, 0o644) try: cert_file.write(cert_pem) finally: cert_file.close() logger.info("Server issued certificate; certificate written to %s", act_cert_path) cert_chain_abspath = None fullchain_abspath = None if chain_cert: chain_pem = crypto_util.dump_pyopenssl_chain(chain_cert) cert_chain_abspath = _save_chain(chain_pem, chain_path) fullchain_abspath = _save_chain(cert_pem + chain_pem, fullchain_path) return os.path.abspath(act_cert_path), cert_chain_abspath, fullchain_abspath def deploy_certificate(self, domains, privkey_path, cert_path, chain_path, fullchain_path): """Install certificate :param list domains: list of domains to install the certificate :param str privkey_path: path to certificate private key :param str cert_path: certificate file path (optional) :param str chain_path: chain file path """ if self.installer is None: logger.warning("No installer specified, client is unable to deploy" "the certificate") raise errors.Error("No installer available") chain_path = None if chain_path is None else os.path.abspath(chain_path) with error_handler.ErrorHandler(self.installer.recovery_routine): for dom in domains: self.installer.deploy_cert( domain=dom, cert_path=os.path.abspath(cert_path), key_path=os.path.abspath(privkey_path), chain_path=chain_path, fullchain_path=fullchain_path) self.installer.save() # needed by the Apache plugin self.installer.save("Deployed Let's Encrypt Certificate") msg = ("We were unable to install your certificate, " "however, we successfully restored your " "server to its prior configuration.") with error_handler.ErrorHandler(self._rollback_and_restart, msg): # sites may have been enabled / final cleanup self.installer.restart() def enhance_config(self, domains, config): """Enhance the configuration. :param list domains: list of domains to configure :ivar config: Namespace typically produced by :meth:`argparse.ArgumentParser.parse_args`. it must have the redirect, hsts and uir attributes. :type namespace: :class:`argparse.Namespace` :raises .errors.Error: if no installer is specified in the client. """ if self.installer is None: logger.warning("No installer is specified, there isn't any " "configuration to enhance.") raise errors.Error("No installer available") if config is None: logger.warning("No config is specified.") raise errors.Error("No config available") redirect = config.redirect hsts = config.hsts uir = config.uir # Upgrade Insecure Requests if redirect is None: redirect = enhancements.ask("redirect") if redirect: self.apply_enhancement(domains, "redirect") if hsts: self.apply_enhancement(domains, "ensure-http-header", "Strict-Transport-Security") if uir: self.apply_enhancement(domains, "ensure-http-header", "Upgrade-Insecure-Requests") msg = ("We were unable to restart web server") if redirect or hsts or uir: with error_handler.ErrorHandler(self._rollback_and_restart, msg): self.installer.restart() def apply_enhancement(self, domains, enhancement, options=None): """Applies an enhacement on all domains. 
:param domains: list of ssl_vhosts :type list of str :param enhancement: name of enhancement, e.g. ensure-http-header :type str .. note:: when more options are need make options a list. :param options: options to enhancement, e.g. Strict-Transport-Security :type str :raises .errors.PluginError: If Enhancement is not supported, or if there is any other problem with the enhancement. """ msg = ("We were unable to set up enhancement %s for your server, " "however, we successfully installed your certificate." % (enhancement)) with error_handler.ErrorHandler(self._recovery_routine_with_msg, msg): for dom in domains: try: self.installer.enhance(dom, enhancement, options) except errors.PluginEnhancementAlreadyPresent: logger.warn("Enhancement %s was already set.", enhancement) except errors.PluginError: logger.warn("Unable to set enhancement %s for %s", enhancement, dom) raise self.installer.save("Add enhancement %s" % (enhancement)) def _recovery_routine_with_msg(self, success_msg): """Calls the installer's recovery routine and prints success_msg :param str success_msg: message to show on successful recovery """ self.installer.recovery_routine() reporter = zope.component.getUtility(interfaces.IReporter) reporter.add_message(success_msg, reporter.HIGH_PRIORITY) def _rollback_and_restart(self, success_msg): """Rollback the most recent checkpoint and restart the webserver :param str success_msg: message to show on successful rollback """ logger.critical("Rolling back to previous server configuration...") reporter = zope.component.getUtility(interfaces.IReporter) try: self.installer.rollback_checkpoints() self.installer.restart() except: # TODO: suggest letshelp-letsencypt here reporter.add_message( "An error occurred and we failed to restore your config and " "restart your server. Please submit a bug report to " "https://github.com/letsencrypt/letsencrypt", reporter.HIGH_PRIORITY) raise reporter.add_message(success_msg, reporter.HIGH_PRIORITY) def validate_key_csr(privkey, csr=None): """Validate Key and CSR files. Verifies that the client key and csr arguments are valid and correspond to one another. This does not currently check the names in the CSR due to the inability to read SANs from CSRs in python crypto libraries. If csr is left as None, only the key will be validated. :param privkey: Key associated with CSR :type privkey: :class:`letsencrypt.le_util.Key` :param .le_util.CSR csr: CSR :raises .errors.Error: when validation fails """ # TODO: Handle all of these problems appropriately # The client can eventually do things like prompt the user # and allow the user to take more appropriate actions # Key must be readable and valid. if privkey.pem and not crypto_util.valid_privkey(privkey.pem): raise errors.Error("The provided key is not a valid key") if csr: if csr.form == "der": csr_obj = OpenSSL.crypto.load_certificate_request( OpenSSL.crypto.FILETYPE_ASN1, csr.data) csr = le_util.CSR(csr.file, OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, csr_obj), "pem") # If CSR is provided, it must be readable and valid. if csr.data and not crypto_util.valid_csr(csr.data): raise errors.Error("The provided CSR is not a valid CSR") # If both CSR and key are provided, the key must be the same key used # in the CSR. if csr.data and privkey.pem: if not crypto_util.csr_matches_pubkey( csr.data, privkey.pem): raise errors.Error("The key and CSR do not match") def rollback(default_installer, checkpoints, config, plugins): """Revert configuration the specified number of checkpoints. 
:param int checkpoints: Number of checkpoints to revert. :param config: Configuration. :type config: :class:`letsencrypt.interfaces.IConfig` """ # Misconfigurations are only a slight problems... allow the user to rollback installer = display_ops.pick_installer( config, default_installer, plugins, question="Which installer " "should be used for rollback?") # No Errors occurred during init... proceed normally # If installer is None... couldn't find an installer... there shouldn't be # anything to rollback if installer is not None: installer.rollback_checkpoints(checkpoints) installer.restart() def view_config_changes(config): """View checkpoints and associated configuration changes. .. note:: This assumes that the installation is using a Reverter object. :param config: Configuration. :type config: :class:`letsencrypt.interfaces.IConfig` """ rev = reverter.Reverter(config) rev.recovery_routine() rev.view_config_changes() def _save_chain(chain_pem, chain_path): """Saves chain_pem at a unique path based on chain_path. :param str chain_pem: certificate chain in PEM format :param str chain_path: candidate path for the cert chain :returns: absolute path to saved cert chain :rtype: str """ chain_file, act_chain_path = le_util.unique_file(chain_path, 0o644) try: chain_file.write(chain_pem) finally: chain_file.close() logger.info("Cert chain written to %s", act_chain_path) # This expects a valid chain file return os.path.abspath(act_chain_path)
37.923205
84
0.655045
"""Let's Encrypt client API.""" import logging import os from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa import OpenSSL import zope.component from acme import client as acme_client from acme import jose from acme import messages import letsencrypt from letsencrypt import account from letsencrypt import auth_handler from letsencrypt import configuration from letsencrypt import constants from letsencrypt import continuity_auth from letsencrypt import crypto_util from letsencrypt import errors from letsencrypt import error_handler from letsencrypt import interfaces from letsencrypt import le_util from letsencrypt import reverter from letsencrypt import storage from letsencrypt.display import ops as display_ops from letsencrypt.display import enhancements logger = logging.getLogger(__name__) def acme_from_config_key(config, key): "Wrangle ACME client construction" # TODO: Allow for other alg types besides RS256 net = acme_client.ClientNetwork(key, verify_ssl=(not config.no_verify_ssl), user_agent=_determine_user_agent(config)) return acme_client.Client(config.server, key=key, net=net) def _determine_user_agent(config): """ Set a user_agent string in the config based on the choice of plugins. (this wasn't knowable at construction time) :returns: the client's User-Agent string :rtype: `str` """ if config.user_agent is None: ua = "LetsEncryptPythonClient/{0} ({1}) Authenticator/{2} Installer/{3}" ua = ua.format(letsencrypt.__version__, " ".join(le_util.get_os_info()), config.authenticator, config.installer) else: ua = config.user_agent return ua def register(config, account_storage, tos_cb=None): """Register new account with an ACME CA. This function takes care of generating fresh private key, registering the account, optionally accepting CA Terms of Service and finally saving the account. It should be called prior to initialization of `Client`, unless account has already been created. :param .IConfig config: Client configuration. :param .AccountStorage account_storage: Account storage where newly registered account will be saved to. Save happens only after TOS acceptance step, so any account private keys or `.RegistrationResource` will not be persisted if `tos_cb` returns ``False``. :param tos_cb: If ACME CA requires the user to accept a Terms of Service before registering account, client action is necessary. For example, a CLI tool would prompt the user acceptance. `tos_cb` must be a callable that should accept `.RegistrationResource` and return a `bool`: ``True`` iff the Terms of Service present in the contained `.Registration.terms_of_service` is accepted by the client, and ``False`` otherwise. ``tos_cb`` will be called only if the client acction is necessary, i.e. when ``terms_of_service is not None``. This argument is optional, if not supplied it will default to automatic acceptance! :raises letsencrypt.errors.Error: In case of any client problems, in particular registration failure, or unaccepted Terms of Service. :raises acme.errors.Error: In case of any protocol problems. :returns: Newly registered and saved account, as well as protocol API handle (should be used in `Client` initialization). 
:rtype: `tuple` of `.Account` and `acme.client.Client` """ # Log non-standard actions, potentially wrong API calls if account_storage.find_all(): logger.info("There are already existing accounts for %s", config.server) if config.email is None: if not config.register_unsafely_without_email: msg = ("No email was provided and " "--register-unsafely-without-email was not present.") logger.warn(msg) raise errors.Error(msg) logger.warn("Registering without email!") # Each new registration shall use a fresh new key key = jose.JWKRSA(key=jose.ComparableRSAKey( rsa.generate_private_key( public_exponent=65537, key_size=config.rsa_key_size, backend=default_backend()))) acme = acme_from_config_key(config, key) # TODO: add phone? regr = perform_registration(acme, config) if regr.terms_of_service is not None: if tos_cb is not None and not tos_cb(regr): raise errors.Error( "Registration cannot proceed without accepting " "Terms of Service.") regr = acme.agree_to_tos(regr) acc = account.Account(regr, key) account.report_new_account(acc, config) account_storage.save(acc) return acc, acme def perform_registration(acme, config): """ Actually register new account, trying repeatedly if there are email problems :param .IConfig config: Client configuration. :param acme.client.Client client: ACME client object. :returns: Registration Resource. :rtype: `acme.messages.RegistrationResource` :raises .UnexpectedUpdate: """ try: return acme.register(messages.NewRegistration.from_data(email=config.email)) except messages.Error, e: err = repr(e) if "MX record" in err or "Validation of contact mailto" in err: config.namespace.email = display_ops.get_email(more=True, invalid=True) return perform_registration(acme, config) else: raise class Client(object): """ACME protocol client. :ivar .IConfig config: Client configuration. :ivar .Account account: Account registered with `register`. :ivar .AuthHandler auth_handler: Authorizations handler that will dispatch DV and Continuity challenges to appropriate authenticators (providing `.IAuthenticator` interface). :ivar .IAuthenticator dv_auth: Prepared (`.IAuthenticator.prepare`) authenticator that can solve the `.constants.DV_CHALLENGES`. :ivar .IInstaller installer: Installer. :ivar acme.client.Client acme: Optional ACME client API handle. You might already have one from `register`. """ def __init__(self, config, account_, dv_auth, installer, acme=None): """Initialize a client.""" self.config = config self.account = account_ self.dv_auth = dv_auth self.installer = installer # Initialize ACME if account is provided if acme is None and self.account is not None: acme = acme_from_config_key(config, self.account.key) self.acme = acme # TODO: Check if self.config.enroll_autorenew is None. If # so, set it based to the default: figure out if dv_auth is # standalone (then default is False, otherwise default is True) if dv_auth is not None: cont_auth = continuity_auth.ContinuityAuthenticator(config, installer) self.auth_handler = auth_handler.AuthHandler( dv_auth, cont_auth, self.acme, self.account) else: self.auth_handler = None def _obtain_certificate(self, domains, csr): """Obtain certificate. Internal function with precondition that `domains` are consistent with identifiers present in the `csr`. :param list domains: Domain names. :param .le_util.CSR csr: DER-encoded Certificate Signing Request. The key used to generate this CSR can be different than `authkey`. :returns: `.CertificateResource` and certificate chain (as returned by `.fetch_chain`). 
:rtype: tuple """ if self.auth_handler is None: msg = ("Unable to obtain certificate because authenticator is " "not set.") logger.warning(msg) raise errors.Error(msg) if self.account.regr is None: raise errors.Error("Please register with the ACME server first.") logger.debug("CSR: %s, domains: %s", csr, domains) authzr = self.auth_handler.get_authorizations(domains) certr = self.acme.request_issuance( jose.ComparableX509(OpenSSL.crypto.load_certificate_request( OpenSSL.crypto.FILETYPE_ASN1, csr.data)), authzr) return certr, self.acme.fetch_chain(certr) def obtain_certificate_from_csr(self, csr): """Obtain certficiate from CSR. :param .le_util.CSR csr: DER-encoded Certificate Signing Request. :returns: `.CertificateResource` and certificate chain (as returned by `.fetch_chain`). :rtype: tuple """ return self._obtain_certificate( # TODO: add CN to domains? crypto_util.get_sans_from_csr( csr.data, OpenSSL.crypto.FILETYPE_ASN1), csr) def obtain_certificate(self, domains): """Obtains a certificate from the ACME server. `.register` must be called before `.obtain_certificate` :param set domains: domains to get a certificate :returns: `.CertificateResource`, certificate chain (as returned by `.fetch_chain`), and newly generated private key (`.le_util.Key`) and DER-encoded Certificate Signing Request (`.le_util.CSR`). :rtype: tuple """ # Create CSR from names key = crypto_util.init_save_key( self.config.rsa_key_size, self.config.key_dir) csr = crypto_util.init_save_csr(key, domains, self.config.csr_dir) return self._obtain_certificate(domains, csr) + (key, csr) def obtain_and_enroll_certificate(self, domains): """Obtain and enroll certificate. Get a new certificate for the specified domains using the specified authenticator and installer, and then create a new renewable lineage containing it. :param list domains: Domains to request. :param plugins: A PluginsFactory object. :returns: A new :class:`letsencrypt.storage.RenewableCert` instance referred to the enrolled cert lineage, or False if the cert could not be obtained. """ certr, chain, key, _ = self.obtain_certificate(domains) # XXX: We clearly need a more general and correct way of getting # options into the configobj for the RenewableCert instance. # This is a quick-and-dirty way to do it to allow integration # testing to start. (Note that the config parameter to new_lineage # ideally should be a ConfigObj, but in this case a dict will be # accepted in practice.) params = vars(self.config.namespace) config = {} cli_config = configuration.RenewerConfiguration(self.config.namespace) if (cli_config.config_dir != constants.CLI_DEFAULTS["config_dir"] or cli_config.work_dir != constants.CLI_DEFAULTS["work_dir"]): logger.warning( "Non-standard path(s), might not work with crontab installed " "by your operating system package manager") lineage = storage.RenewableCert.new_lineage( domains[0], OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, certr.body), key.pem, crypto_util.dump_pyopenssl_chain(chain), params, config, cli_config) return lineage def save_certificate(self, certr, chain_cert, cert_path, chain_path, fullchain_path): """Saves the certificate received from the ACME server. :param certr: ACME "certificate" resource. :type certr: :class:`acme.messages.Certificate` :param list chain_cert: :param str cert_path: Candidate path to a certificate. :param str chain_path: Candidate path to a certificate chain. :param str fullchain_path: Candidate path to a full cert chain. 
:returns: cert_path, chain_path, and fullchain_path as absolute paths to the actual files :rtype: `tuple` of `str` :raises IOError: If unable to find room to write the cert files """ for path in cert_path, chain_path, fullchain_path: le_util.make_or_verify_dir( os.path.dirname(path), 0o755, os.geteuid(), self.config.strict_permissions) cert_pem = OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, certr.body) cert_file, act_cert_path = le_util.unique_file(cert_path, 0o644) try: cert_file.write(cert_pem) finally: cert_file.close() logger.info("Server issued certificate; certificate written to %s", act_cert_path) cert_chain_abspath = None fullchain_abspath = None if chain_cert: chain_pem = crypto_util.dump_pyopenssl_chain(chain_cert) cert_chain_abspath = _save_chain(chain_pem, chain_path) fullchain_abspath = _save_chain(cert_pem + chain_pem, fullchain_path) return os.path.abspath(act_cert_path), cert_chain_abspath, fullchain_abspath def deploy_certificate(self, domains, privkey_path, cert_path, chain_path, fullchain_path): """Install certificate :param list domains: list of domains to install the certificate :param str privkey_path: path to certificate private key :param str cert_path: certificate file path (optional) :param str chain_path: chain file path """ if self.installer is None: logger.warning("No installer specified, client is unable to deploy" "the certificate") raise errors.Error("No installer available") chain_path = None if chain_path is None else os.path.abspath(chain_path) with error_handler.ErrorHandler(self.installer.recovery_routine): for dom in domains: self.installer.deploy_cert( domain=dom, cert_path=os.path.abspath(cert_path), key_path=os.path.abspath(privkey_path), chain_path=chain_path, fullchain_path=fullchain_path) self.installer.save() # needed by the Apache plugin self.installer.save("Deployed Let's Encrypt Certificate") msg = ("We were unable to install your certificate, " "however, we successfully restored your " "server to its prior configuration.") with error_handler.ErrorHandler(self._rollback_and_restart, msg): self.installer.restart() def enhance_config(self, domains, config): """Enhance the configuration. :param list domains: list of domains to configure :ivar config: Namespace typically produced by :meth:`argparse.ArgumentParser.parse_args`. it must have the redirect, hsts and uir attributes. :type namespace: :class:`argparse.Namespace` :raises .errors.Error: if no installer is specified in the client. """ if self.installer is None: logger.warning("No installer is specified, there isn't any " "configuration to enhance.") raise errors.Error("No installer available") if config is None: logger.warning("No config is specified.") raise errors.Error("No config available") redirect = config.redirect hsts = config.hsts uir = config.uir # Upgrade Insecure Requests if redirect is None: redirect = enhancements.ask("redirect") if redirect: self.apply_enhancement(domains, "redirect") if hsts: self.apply_enhancement(domains, "ensure-http-header", "Strict-Transport-Security") if uir: self.apply_enhancement(domains, "ensure-http-header", "Upgrade-Insecure-Requests") msg = ("We were unable to restart web server") if redirect or hsts or uir: with error_handler.ErrorHandler(self._rollback_and_restart, msg): self.installer.restart() def apply_enhancement(self, domains, enhancement, options=None): """Applies an enhacement on all domains. :param domains: list of ssl_vhosts :type list of str :param enhancement: name of enhancement, e.g. ensure-http-header :type str .. 
note:: when more options are need make options a list. :param options: options to enhancement, e.g. Strict-Transport-Security :type str :raises .errors.PluginError: If Enhancement is not supported, or if there is any other problem with the enhancement. """ msg = ("We were unable to set up enhancement %s for your server, " "however, we successfully installed your certificate." % (enhancement)) with error_handler.ErrorHandler(self._recovery_routine_with_msg, msg): for dom in domains: try: self.installer.enhance(dom, enhancement, options) except errors.PluginEnhancementAlreadyPresent: logger.warn("Enhancement %s was already set.", enhancement) except errors.PluginError: logger.warn("Unable to set enhancement %s for %s", enhancement, dom) raise self.installer.save("Add enhancement %s" % (enhancement)) def _recovery_routine_with_msg(self, success_msg): """Calls the installer's recovery routine and prints success_msg :param str success_msg: message to show on successful recovery """ self.installer.recovery_routine() reporter = zope.component.getUtility(interfaces.IReporter) reporter.add_message(success_msg, reporter.HIGH_PRIORITY) def _rollback_and_restart(self, success_msg): """Rollback the most recent checkpoint and restart the webserver :param str success_msg: message to show on successful rollback """ logger.critical("Rolling back to previous server configuration...") reporter = zope.component.getUtility(interfaces.IReporter) try: self.installer.rollback_checkpoints() self.installer.restart() except: reporter.add_message( "An error occurred and we failed to restore your config and " "restart your server. Please submit a bug report to " "https://github.com/letsencrypt/letsencrypt", reporter.HIGH_PRIORITY) raise reporter.add_message(success_msg, reporter.HIGH_PRIORITY) def validate_key_csr(privkey, csr=None): """Validate Key and CSR files. Verifies that the client key and csr arguments are valid and correspond to one another. This does not currently check the names in the CSR due to the inability to read SANs from CSRs in python crypto libraries. If csr is left as None, only the key will be validated. :param privkey: Key associated with CSR :type privkey: :class:`letsencrypt.le_util.Key` :param .le_util.CSR csr: CSR :raises .errors.Error: when validation fails """ if privkey.pem and not crypto_util.valid_privkey(privkey.pem): raise errors.Error("The provided key is not a valid key") if csr: if csr.form == "der": csr_obj = OpenSSL.crypto.load_certificate_request( OpenSSL.crypto.FILETYPE_ASN1, csr.data) csr = le_util.CSR(csr.file, OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, csr_obj), "pem") if csr.data and not crypto_util.valid_csr(csr.data): raise errors.Error("The provided CSR is not a valid CSR") if csr.data and privkey.pem: if not crypto_util.csr_matches_pubkey( csr.data, privkey.pem): raise errors.Error("The key and CSR do not match") def rollback(default_installer, checkpoints, config, plugins): """Revert configuration the specified number of checkpoints. :param int checkpoints: Number of checkpoints to revert. :param config: Configuration. :type config: :class:`letsencrypt.interfaces.IConfig` """ installer = display_ops.pick_installer( config, default_installer, plugins, question="Which installer " "should be used for rollback?") if installer is not None: installer.rollback_checkpoints(checkpoints) installer.restart() def view_config_changes(config): """View checkpoints and associated configuration changes. .. 
note:: This assumes that the installation is using a Reverter object. :param config: Configuration. :type config: :class:`letsencrypt.interfaces.IConfig` """ rev = reverter.Reverter(config) rev.recovery_routine() rev.view_config_changes() def _save_chain(chain_pem, chain_path): """Saves chain_pem at a unique path based on chain_path. :param str chain_pem: certificate chain in PEM format :param str chain_path: candidate path for the cert chain :returns: absolute path to saved cert chain :rtype: str """ chain_file, act_chain_path = le_util.unique_file(chain_path, 0o644) try: chain_file.write(chain_pem) finally: chain_file.close() logger.info("Cert chain written to %s", act_chain_path) return os.path.abspath(act_chain_path)
false
true
f7010e635fd165d115aeb280622b39b947f59dc1
2,239
py
Python
tests/models/test_full_frame_model.py
lsst-sitcom/spot_motion_monitor
3d0242276198126240667ba13e95b7bdf901d053
[ "BSD-3-Clause" ]
null
null
null
tests/models/test_full_frame_model.py
lsst-sitcom/spot_motion_monitor
3d0242276198126240667ba13e95b7bdf901d053
[ "BSD-3-Clause" ]
5
2020-01-08T23:50:22.000Z
2020-02-14T18:15:20.000Z
tests/models/test_full_frame_model.py
lsst-sitcom/spot_motion_monitor
3d0242276198126240667ba13e95b7bdf901d053
[ "BSD-3-Clause" ]
null
null
null
# This file is part of spot_motion_monitor. # # Developed for LSST System Integration, Test and Commissioning. # # See the LICENSE file at the top-level directory of this distribution # for details of code ownership. # # Use of this source code is governed by a 3-clause BSD-style # license that can be found in the LICENSE file. import numpy as np import pytest from spot_motion_monitor.camera.gaussian_camera import GaussianCamera from spot_motion_monitor.models import FullFrameModel from spot_motion_monitor.utils import FrameRejected, TimeHandler class TestFullFrameModel(): def setup_class(cls): cls.model = FullFrameModel() cls.model.timeHandler = TimeHandler() def checkFrame(self, flux, maxAdc, comX, comY): return flux > 4000 and maxAdc > 130 and comX > 0 and comY > 0 def test_parametersAfterConstruction(self): assert self.model.sigmaScale == 5.0 assert self.model.minimumNumPixels == 10 assert self.model.timeHandler is not None def test_frameCalculations(self): # This test requires the generation of a CCD frame which will be # provided by the GaussianCamera camera = GaussianCamera() camera.seed = 1000 camera.startup() frame = camera.getFullFrame() info = self.model.calculateCentroid(frame) assert info.centerX == 288.47687644439395 assert info.centerY == 224.45394404821826 assert info.flux == 3235.9182163661176 assert info.maxAdc == 135.83703259361937 assert info.fwhm == 5.749039360993981 assert info.stdNoObjects is None def test_badFrameCalculation(self): frame = np.ones((480, 640)) with pytest.raises(FrameRejected): self.model.calculateCentroid(frame) def test_failedFrameCheck(self): # This test requires the generation of a CCD frame which will be # provided by the GaussianCamera self.model.frameCheck = self.checkFrame camera = GaussianCamera() camera.seed = 1000 camera.startup() frame = camera.getFullFrame() with pytest.raises(FrameRejected): self.model.calculateCentroid(frame) self.model.frameCheck = None
35.539683
72
0.6954
import numpy as np import pytest from spot_motion_monitor.camera.gaussian_camera import GaussianCamera from spot_motion_monitor.models import FullFrameModel from spot_motion_monitor.utils import FrameRejected, TimeHandler class TestFullFrameModel(): def setup_class(cls): cls.model = FullFrameModel() cls.model.timeHandler = TimeHandler() def checkFrame(self, flux, maxAdc, comX, comY): return flux > 4000 and maxAdc > 130 and comX > 0 and comY > 0 def test_parametersAfterConstruction(self): assert self.model.sigmaScale == 5.0 assert self.model.minimumNumPixels == 10 assert self.model.timeHandler is not None def test_frameCalculations(self): camera = GaussianCamera() camera.seed = 1000 camera.startup() frame = camera.getFullFrame() info = self.model.calculateCentroid(frame) assert info.centerX == 288.47687644439395 assert info.centerY == 224.45394404821826 assert info.flux == 3235.9182163661176 assert info.maxAdc == 135.83703259361937 assert info.fwhm == 5.749039360993981 assert info.stdNoObjects is None def test_badFrameCalculation(self): frame = np.ones((480, 640)) with pytest.raises(FrameRejected): self.model.calculateCentroid(frame) def test_failedFrameCheck(self): self.model.frameCheck = self.checkFrame camera = GaussianCamera() camera.seed = 1000 camera.startup() frame = camera.getFullFrame() with pytest.raises(FrameRejected): self.model.calculateCentroid(frame) self.model.frameCheck = None
true
true
f7010e9f11d3af9bdc7566f2be0adab9c6000069
6,529
py
Python
addons14/github_connector_odoo/models/odoo_module.py
odoochain/addons_oca
55d456d798aebe16e49b4a6070765f206a8885ca
[ "MIT" ]
1
2021-06-10T14:59:13.000Z
2021-06-10T14:59:13.000Z
addons14/github_connector_odoo/models/odoo_module.py
odoochain/addons_oca
55d456d798aebe16e49b4a6070765f206a8885ca
[ "MIT" ]
null
null
null
addons14/github_connector_odoo/models/odoo_module.py
odoochain/addons_oca
55d456d798aebe16e49b4a6070765f206a8885ca
[ "MIT" ]
1
2021-04-09T09:44:44.000Z
2021-04-09T09:44:44.000Z
# Copyright (C) 2016-Today: Odoo Community Association (OCA) # @author: Sylvain LE GAL (https://twitter.com/legalsylvain) # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo import _, api, fields, models from odoo.tools import html_sanitize class OdooModule(models.Model): _inherit = "abstract.action.mixin" _name = "odoo.module" _description = "Odoo Module" _order = "technical_name, name" # Column Section name = fields.Char( string="Name", store=True, readonly=True, compute="_compute_name" ) technical_name = fields.Char( string="Technical Name", index=True, required=True, readonly=True ) module_version_ids = fields.One2many( comodel_name="odoo.module.version", inverse_name="module_id", string="Versions", readonly=True, ) module_version_qty = fields.Integer( string="Number of Module Versions", compute="_compute_module_version_qty", store=True, ) author_ids = fields.Many2many( string="Authors", comodel_name="odoo.author", compute="_compute_author", relation="github_module_author_rel", column1="module_id", column2="author_id", store=True, ) author_ids_description = fields.Char( string="Authors (Text)", compute="_compute_author", store=True ) organization_serie_ids = fields.Many2many( string="Series", comodel_name="github.organization.serie", compute="_compute_organization_serie", store=True, relation="github_module_organization_serie_rel", column1="module_id", column2="organization_serie_id", ) organization_serie_ids_description = fields.Char( string="Series (Text)", store=True, compute="_compute_organization_serie", ) description_rst = fields.Char( string="RST Description of the last Version", store=True, readonly=True, compute="_compute_description", ) description_rst_html = fields.Html( string="HTML of the RST Description of the last Version", store=True, readonly=True, compute="_compute_description", ) dependence_module_version_ids = fields.Many2many( comodel_name="odoo.module.version", string="Module Versions that depend on this module", relation="module_version_dependency_rel", column1="dependency_module_id", column2="module_version_id", ) dependence_module_version_qty = fields.Integer( string="Number of Module Versions that depend on this module", compute="_compute_dependence_module_version_qty", store=True, ) image = fields.Binary( string="Icon Image", compute="_compute_image", store=True, attachment=True ) # Compute Section @api.depends("module_version_ids.image") def _compute_image(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) module.image = last_version and last_version.image @api.depends("technical_name", "module_version_ids.name") def _compute_name(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) if last_version: module.name = last_version.name else: module.name = module.technical_name @api.depends("module_version_ids", "module_version_ids.description_rst_html") def _compute_description(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) if last_version: module.description_rst = 
last_version.description_rst module.description_rst_html = last_version.description_rst_html else: module.description_rst = "" module.description_rst_html = html_sanitize( "<h1 style='color:gray;'>" + _("No Version Found") + "</h1>" ) @api.depends("dependence_module_version_ids.dependency_module_ids") def _compute_dependence_module_version_qty(self): for module in self: module.dependence_module_version_qty = len( module.dependence_module_version_ids ) @api.depends("module_version_ids") def _compute_module_version_qty(self): for module in self: module.module_version_qty = len(module.module_version_ids) @api.depends("module_version_ids.author_ids") def _compute_author(self): for module in self: authors = [] for version in module.module_version_ids: authors += version.author_ids authors = set(authors) module.author_ids = [x.id for x in authors] module.author_ids_description = ", ".join(sorted([x.name for x in authors])) @api.depends("module_version_ids.organization_serie_id") def _compute_organization_serie(self): for module in self: organization_series = [] for version in module.module_version_ids: organization_series += version.organization_serie_id organization_series = set(organization_series) module.organization_serie_ids = [x.id for x in organization_series] module.organization_serie_ids_description = " - ".join( [x.name for x in sorted(organization_series, key=lambda x: x.sequence)] ) # Custom Section @api.model def create_if_not_exist(self, technical_name): module = self.search([("technical_name", "=", technical_name)]) if not module: module = self.create({"technical_name": technical_name}) return module def name_get(self): return [(module.id, module.technical_name) for module in self]
35.873626
88
0.642824
from odoo import _, api, fields, models from odoo.tools import html_sanitize class OdooModule(models.Model): _inherit = "abstract.action.mixin" _name = "odoo.module" _description = "Odoo Module" _order = "technical_name, name" name = fields.Char( string="Name", store=True, readonly=True, compute="_compute_name" ) technical_name = fields.Char( string="Technical Name", index=True, required=True, readonly=True ) module_version_ids = fields.One2many( comodel_name="odoo.module.version", inverse_name="module_id", string="Versions", readonly=True, ) module_version_qty = fields.Integer( string="Number of Module Versions", compute="_compute_module_version_qty", store=True, ) author_ids = fields.Many2many( string="Authors", comodel_name="odoo.author", compute="_compute_author", relation="github_module_author_rel", column1="module_id", column2="author_id", store=True, ) author_ids_description = fields.Char( string="Authors (Text)", compute="_compute_author", store=True ) organization_serie_ids = fields.Many2many( string="Series", comodel_name="github.organization.serie", compute="_compute_organization_serie", store=True, relation="github_module_organization_serie_rel", column1="module_id", column2="organization_serie_id", ) organization_serie_ids_description = fields.Char( string="Series (Text)", store=True, compute="_compute_organization_serie", ) description_rst = fields.Char( string="RST Description of the last Version", store=True, readonly=True, compute="_compute_description", ) description_rst_html = fields.Html( string="HTML of the RST Description of the last Version", store=True, readonly=True, compute="_compute_description", ) dependence_module_version_ids = fields.Many2many( comodel_name="odoo.module.version", string="Module Versions that depend on this module", relation="module_version_dependency_rel", column1="dependency_module_id", column2="module_version_id", ) dependence_module_version_qty = fields.Integer( string="Number of Module Versions that depend on this module", compute="_compute_dependence_module_version_qty", store=True, ) image = fields.Binary( string="Icon Image", compute="_compute_image", store=True, attachment=True ) @api.depends("module_version_ids.image") def _compute_image(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) module.image = last_version and last_version.image @api.depends("technical_name", "module_version_ids.name") def _compute_name(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) if last_version: module.name = last_version.name else: module.name = module.technical_name @api.depends("module_version_ids", "module_version_ids.description_rst_html") def _compute_description(self): module_version_obj = self.env["odoo.module.version"] for module in self: version_ids = module.module_version_ids.ids last_version = module_version_obj.search( [("id", "in", version_ids)], order="organization_serie_id desc", limit=1 ) if last_version: module.description_rst = last_version.description_rst module.description_rst_html = last_version.description_rst_html else: module.description_rst = "" module.description_rst_html = html_sanitize( "<h1 style='color:gray;'>" + _("No Version Found") + "</h1>" ) 
@api.depends("dependence_module_version_ids.dependency_module_ids") def _compute_dependence_module_version_qty(self): for module in self: module.dependence_module_version_qty = len( module.dependence_module_version_ids ) @api.depends("module_version_ids") def _compute_module_version_qty(self): for module in self: module.module_version_qty = len(module.module_version_ids) @api.depends("module_version_ids.author_ids") def _compute_author(self): for module in self: authors = [] for version in module.module_version_ids: authors += version.author_ids authors = set(authors) module.author_ids = [x.id for x in authors] module.author_ids_description = ", ".join(sorted([x.name for x in authors])) @api.depends("module_version_ids.organization_serie_id") def _compute_organization_serie(self): for module in self: organization_series = [] for version in module.module_version_ids: organization_series += version.organization_serie_id organization_series = set(organization_series) module.organization_serie_ids = [x.id for x in organization_series] module.organization_serie_ids_description = " - ".join( [x.name for x in sorted(organization_series, key=lambda x: x.sequence)] ) @api.model def create_if_not_exist(self, technical_name): module = self.search([("technical_name", "=", technical_name)]) if not module: module = self.create({"technical_name": technical_name}) return module def name_get(self): return [(module.id, module.technical_name) for module in self]
true
true
f7010ed84f58c84788493d098b72557b60251c87
3,678
py
Python
loss/general_adaptive_loss.py
jmendozais/SDSSDepth
7a4d0c5affef3eda7056876ccb2365ac883c08eb
[ "MIT" ]
null
null
null
loss/general_adaptive_loss.py
jmendozais/SDSSDepth
7a4d0c5affef3eda7056876ccb2365ac883c08eb
[ "MIT" ]
null
null
null
loss/general_adaptive_loss.py
jmendozais/SDSSDepth
7a4d0c5affef3eda7056876ccb2365ac883c08eb
[ "MIT" ]
null
null
null
import sys import math import os import torch import torchvision import numpy as np from pkg_resources import resource_stream def interpolate1d(x, values, tangents): ''' Returns: Returns the interpolated or extrapolated values for each query point, depending on whether or not the query lies within the span of the spline. ''' assert torch.is_tensor(x) assert torch.is_tensor(values) assert torch.is_tensor(tangents) float_dtype = x.dtype assert values.dtype == float_dtype assert tangents.dtype == float_dtype assert len(values.shape) == 1 assert len(tangents.shape) == 1 assert values.shape[0] == tangents.shape[0] x_lo = torch.floor(torch.clamp(x, torch.as_tensor(0), values.shape[0] - 2)).type(torch.int64) x_hi = x_lo + 1 # Compute the relative distance between each `x` and the knot below it. t = x - x_lo.type(float_dtype) # Compute the cubic hermite expansion of `t`. t_sq = t**2 t_cu = t * t_sq h01 = -2. * t_cu + 3. * t_sq h00 = 1. - h01 h11 = t_cu - t_sq h10 = h11 - t_sq + t # Linearly extrapolate above and below the extents of the spline for all # values. value_before = tangents[0] * t + values[0] value_after = tangents[-1] * (t - 1.) + values[-1] # Cubically interpolate between the knots below and above each query point. neighbor_values_lo = values[x_lo] neighbor_values_hi = values[x_hi] neighbor_tangents_lo = tangents[x_lo] neighbor_tangents_hi = tangents[x_hi] value_mid = ( neighbor_values_lo * h00 + neighbor_values_hi * h01 + neighbor_tangents_lo * h10 + neighbor_tangents_hi * h11) return torch.where(t < 0., value_before, torch.where(t > 1., value_after, value_mid)) def log_safe(x): x = torch.as_tensor(x) return torch.log(torch.min(x, torch.tensor(33e37).to(x))) def load_spline_params(): dirname = os.path.dirname(__file__) with open(os.path.join(dirname, '../misc/partition_spline.npz'), "rb") as spline_file: with np.load(spline_file, allow_pickle=False) as f: spline_x_scale = torch.tensor(f['x_scale']) spline_values = torch.tensor(f['values']) spline_tangents = torch.tensor(f['tangents']) return spline_x_scale, spline_values, spline_tangents def get_partition_init(shape): shape = torch.as_tensor(shape) base1 = (2.25 * shape - 4.5) / (torch.abs(shape - 2) + 0.25) + shape + 2 base2 = 5. / 18. * log_safe(4 * shape - 15) + 8 return torch.where(shape < 4, base1, base2) def get_partition(shape): shape = torch.as_tensor(shape) assert (shape >= 0).all() init = get_partition_init(shape) x_scale, values, tangents = load_spline_params() return interpolate1d(init * x_scale.to(init), values.to(init), tangents.to(init)) def general_adaptive_loss(x, shape, bowl=1.): input_shape = x.shape shape = torch.as_tensor(shape).to(x.device) bowl = torch.as_tensor(bowl).to(x.device) b = x.size(0) x = x.view(b, -1) if len(shape.shape) == 0: shape = shape.unsqueeze(dim=0).expand([b, ]).unsqueeze(dim=1) else: shape = shape.view(b, -1) if len(bowl.shape) == 0: bowl = bowl.unsqueeze(dim=0).expand([b, ]).unsqueeze(dim=1) else: bowl = bowl.view(b, -1) partition = get_partition(shape) ans = (torch.abs(shape - 2)/shape) * (torch.pow((torch.square(x/bowl) / torch.abs(shape - 2) + 1), shape/2) - 1) + log_safe(bowl) + log_safe(partition) return ans.view(input_shape)
30.907563
132
0.63404
import sys import math import os import torch import torchvision import numpy as np from pkg_resources import resource_stream def interpolate1d(x, values, tangents): assert torch.is_tensor(x) assert torch.is_tensor(values) assert torch.is_tensor(tangents) float_dtype = x.dtype assert values.dtype == float_dtype assert tangents.dtype == float_dtype assert len(values.shape) == 1 assert len(tangents.shape) == 1 assert values.shape[0] == tangents.shape[0] x_lo = torch.floor(torch.clamp(x, torch.as_tensor(0), values.shape[0] - 2)).type(torch.int64) x_hi = x_lo + 1 t = x - x_lo.type(float_dtype) t_sq = t**2 t_cu = t * t_sq h01 = -2. * t_cu + 3. * t_sq h00 = 1. - h01 h11 = t_cu - t_sq h10 = h11 - t_sq + t value_before = tangents[0] * t + values[0] value_after = tangents[-1] * (t - 1.) + values[-1] neighbor_values_lo = values[x_lo] neighbor_values_hi = values[x_hi] neighbor_tangents_lo = tangents[x_lo] neighbor_tangents_hi = tangents[x_hi] value_mid = ( neighbor_values_lo * h00 + neighbor_values_hi * h01 + neighbor_tangents_lo * h10 + neighbor_tangents_hi * h11) return torch.where(t < 0., value_before, torch.where(t > 1., value_after, value_mid)) def log_safe(x): x = torch.as_tensor(x) return torch.log(torch.min(x, torch.tensor(33e37).to(x))) def load_spline_params(): dirname = os.path.dirname(__file__) with open(os.path.join(dirname, '../misc/partition_spline.npz'), "rb") as spline_file: with np.load(spline_file, allow_pickle=False) as f: spline_x_scale = torch.tensor(f['x_scale']) spline_values = torch.tensor(f['values']) spline_tangents = torch.tensor(f['tangents']) return spline_x_scale, spline_values, spline_tangents def get_partition_init(shape): shape = torch.as_tensor(shape) base1 = (2.25 * shape - 4.5) / (torch.abs(shape - 2) + 0.25) + shape + 2 base2 = 5. / 18. * log_safe(4 * shape - 15) + 8 return torch.where(shape < 4, base1, base2) def get_partition(shape): shape = torch.as_tensor(shape) assert (shape >= 0).all() init = get_partition_init(shape) x_scale, values, tangents = load_spline_params() return interpolate1d(init * x_scale.to(init), values.to(init), tangents.to(init)) def general_adaptive_loss(x, shape, bowl=1.): input_shape = x.shape shape = torch.as_tensor(shape).to(x.device) bowl = torch.as_tensor(bowl).to(x.device) b = x.size(0) x = x.view(b, -1) if len(shape.shape) == 0: shape = shape.unsqueeze(dim=0).expand([b, ]).unsqueeze(dim=1) else: shape = shape.view(b, -1) if len(bowl.shape) == 0: bowl = bowl.unsqueeze(dim=0).expand([b, ]).unsqueeze(dim=1) else: bowl = bowl.view(b, -1) partition = get_partition(shape) ans = (torch.abs(shape - 2)/shape) * (torch.pow((torch.square(x/bowl) / torch.abs(shape - 2) + 1), shape/2) - 1) + log_safe(bowl) + log_safe(partition) return ans.view(input_shape)
true
true
f701108071e5e40645215cbd4bf739ccb6950fce
10,977
py
Python
topi/python/topi/x86/conv2d_int8.py
byungchul/tvm
ce72e9b552c14e9636e43782ccb3732d00fa0b6b
[ "Apache-2.0" ]
1
2019-10-17T02:08:41.000Z
2019-10-17T02:08:41.000Z
topi/python/topi/x86/conv2d_int8.py
byungchul/tvm
ce72e9b552c14e9636e43782ccb3732d00fa0b6b
[ "Apache-2.0" ]
null
null
null
topi/python/topi/x86/conv2d_int8.py
byungchul/tvm
ce72e9b552c14e9636e43782ccb3732d00fa0b6b
[ "Apache-2.0" ]
null
null
null
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # pylint: disable=invalid-name,unused-variable,unused-argument,no-member """Conv2D int8 schedule on x86""" import re import tvm from tvm import autotvm from tvm.autotvm.task import get_config from tvm.autotvm.task.topi_integration import deserialize_args from ..nn.conv2d import _get_workload as _get_conv2d_workload from .. import generic, tag from ..generic import conv2d as conv2d_generic from ..util import get_const_tuple from ..nn.conv2d import conv2d_NCHWc_int8 from .. import nn from . import conv2d_avx_1x1, conv2d_avx_common def _get_default_config_int8(cfg, data, kernel, strides, padding, out_dtype, is_depthwise=False, layout='NCHW'): """ Get default schedule config for the workload """ assert not is_depthwise, "Depthwise Int8 not supported" wkl = _get_conv2d_workload(data, kernel, strides, padding, out_dtype, layout) is_kernel_1x1 = wkl.hkernel == 1 and wkl.wkernel == 1 if is_kernel_1x1: conv2d_generic.fallback_schedule_cpu_1x1_int8( cfg, wkl, int32_lanes=16, num_int8_elements=4) else: conv2d_generic.fallback_schedule_cpu_common_int8( cfg, wkl, int32_lanes=16, num_int8_elements=4) def _is_int8_hw_support(data_dtype, kernel_dtype): """ Checks to ensure that we can use Intel DLBoost instructions 1) The datatypes are correct. 2) LLVM version has support for the instructions. 3) Target is skylake and above. 
""" # 1) Check datatypes is_dtype_support = data_dtype == 'uint8' and kernel_dtype == 'int8' # 2) Check LLVM support llvm_intrin_fast_int8 = "llvm.x86.avx512.pmaddubs.w.512" llvm_id = tvm.codegen.llvm_lookup_intrinsic_id(llvm_intrin_fast_int8) is_llvm_support = llvm_id != 0 # 3) Check target target = tvm.target.current_target() is_target_support = False for opt in target.options: if opt == '-mcpu=skylake-avx512': is_target_support = True return is_dtype_support and is_llvm_support and is_target_support def _create_tuning_space_int8(cfg, data, kernel, strides, padding, dilation, layout): """Create schedule configuration from input arguments""" dshape = get_const_tuple(data.shape) kshape = get_const_tuple(kernel.shape) pat = re.compile(r'NCHW.+(\d+)c') if layout == 'NCHW': n, ic, h, w = dshape oc, _, kh, kw = kshape elif layout == 'NHWC': n, h, w, ic = dshape kh, kw, oc, _ = kshape elif pat.match(layout) is not None: n, ic_chunk, h, w, ic_bn = dshape target = tvm.target.current_target(allow_none=False) oc_chunk, k_ic, kh, kw, k_ic_f, oc_bn, k_ic_s = kshape ic = ic_chunk * ic_bn assert ic == k_ic * k_ic_f * k_ic_s oc = oc_chunk*oc_bn else: raise ValueError("Not support this layout {} with " "schedule template.".format(layout)) is_kernel_1x1 = kh == 1 and kw == 1 ph, pw = padding if isinstance(padding, (tuple, list)) else (padding, padding) sh, sw = strides if isinstance(strides, (tuple, list)) else (strides, strides) oh = (h - kh + 2 * ph) // sh + 1 ow = (w - kw + 2 * pw) // sw + 1 # Create schedule config cfg.define_split('tile_ic', ic, num_outputs=2, filter=lambda y: y.size[-1] % 4 == 0) cfg.define_split('tile_oc', oc, num_outputs=2, filter=lambda y: y.size[-1] % 16 == 0) cfg.define_split("tile_ow", ow, num_outputs=2, filter=lambda y: y.size[-1] <= 64) if is_kernel_1x1: cfg.define_knob("tile_oh", [1, 2] if oh > 1 else [1]) else: cfg.define_knob("unroll_kw", [True, False]) # Define template function for autotvm task # We define schedule template in this function instead of # declaration function since actual input arguments need # to be altered by the schedule selected. 
@autotvm.task.register("topi_x86_conv2d_NCHWc_int8") def _topi_nn_conv2d_NCHWc_int8(*args, **kwargs): assert not kwargs, "Do not support kwargs in template function call" args = deserialize_args(args) if len(args) == 7: data, kernel, strides, padding, dilation, origin_layout, dtype = args else: assert len(args) == 8 data, kernel, strides, padding, dilation, origin_layout, out_layout, dtype = args raw_data_shape = get_const_tuple(data.shape) raw_kernel_shape = get_const_tuple(kernel.shape) # get config here cfg = get_config() _create_tuning_space_int8(cfg, data, kernel, strides, padding, dilation, origin_layout) # change shape with the value in config ic_bn, oc_bn, ow_bn = (cfg["tile_ic"].size[-1], cfg["tile_oc"].size[-1], cfg["tile_ow"].size[-1]) data_layout = "NCHW%dc" % ic_bn out_layout = "NCHW%dc" % oc_bn # Set up the new shape for data and kernel new_data_shape = (raw_data_shape[0], raw_data_shape[1] // ic_bn, raw_data_shape[2], raw_data_shape[3], ic_bn) n_elems = 4 new_kernel_shape = (raw_kernel_shape[0] // oc_bn, raw_kernel_shape[1] // ic_bn, raw_kernel_shape[2], raw_kernel_shape[3], ic_bn // n_elems, oc_bn, n_elems) new_data = tvm.placeholder(new_data_shape, data.dtype) new_kernel = tvm.placeholder(new_kernel_shape, kernel.dtype) C = _declaration_conv_NCHWc_int8(cfg, new_data, new_kernel, strides, padding, dilation, data_layout, out_layout, dtype) s = _schedule_conv2d_NCHWc_int8(cfg, [C]) return s, [new_data, new_kernel, C] @autotvm.register_topi_compute(conv2d_NCHWc_int8, 'cpu', 'direct') def _declaration_conv_NCHWc_int8(cfg, data, kernel, strides, padding, dilation, layout, out_layout, out_dtype): return nn.conv2d_NCHWc_int8_compute(data, kernel, strides, padding, dilation, layout, out_layout, out_dtype) @autotvm.register_topi_schedule(generic.schedule_conv2d_NCHWc_int8, 'cpu', ['direct']) def _schedule_conv2d_NCHWc_int8(cfg, outs): """Create schedule for tensors""" s = tvm.create_schedule([x.op for x in outs]) scheduled_ops = [] def traverse(op): """Traverse operators from computation graph""" # inline all one-to-one-mapping operators except the last stage (output) if tag.is_broadcast(op.tag): if op not in s.outputs: s[op].compute_inline() for tensor in op.input_tensors: if isinstance(tensor.op, tvm.tensor.ComputeOp) and tensor.op not in scheduled_ops: traverse(tensor.op) if 'conv2d_NCHWc_int8' in op.tag: conv_out = op.output(0) kernel = conv_out.op.input_tensors[1] data_vec = conv_out.op.input_tensors[0] data = data_vec.op.input_tensors[0] \ if isinstance(data_vec.op, tvm.tensor.ComputeOp) and "pad" not in data_vec.op.tag \ else data_vec if isinstance(data.op, tvm.tensor.ComputeOp) and "pad" in data.op.tag: data_pad = data data = data_pad.op.input_tensors[0] args = [s, cfg, data_vec, conv_out, outs[0]] target = tvm.target.current_target(allow_none=False) # int8 conv kernel is 7-dim _, _, kh, kw, _, _, _ = get_const_tuple(kernel.shape) if kh == 1 and kw == 1: conv2d_avx_1x1._schedule_conv_NCHWc_int8(*args) else: conv2d_avx_common._schedule_conv_NCHWc_int8(*args) scheduled_ops.append(op) traverse(outs[0].op) return s @autotvm.register_topi_schedule(generic.schedule_conv2d_nhwc_pack, 'cpu', ['direct']) def schedule_conv2d_nhwc_pack(cfg, outs): """Create schedule for tensors""" s = tvm.create_schedule([x.op for x in outs]) output_op = outs[0].op scheduled_ops = [] def traverse(op): """Traverse operators from computation graph""" # inline all one-to-one-mapping operators except the last stage (output) if tag.is_broadcast(op.tag): if op not in s.outputs: s[op].compute_inline() else: # 
inject custom schedule if len(op.axis) == 4: # schedule bias + bn + relu n, h, w, c = op.axis fused = s[op].fuse(n, h, w) s[op].parallel(fused) s[op].vectorize(c) for tensor in op.input_tensors: if isinstance(tensor.op, tvm.tensor.ComputeOp) and tensor.op not in scheduled_ops: traverse(tensor.op) if 'conv2d_nhwc_pack_int8' in op.tag: conv_out = op.output(0) kernel = conv_out.op.input_tensors[1] data_vec = conv_out.op.input_tensors[0] data = data_vec.op.input_tensors[0] \ if isinstance(data_vec.op, tvm.tensor.ComputeOp) and "pad" not in data_vec.op.tag \ else data_vec if isinstance(data.op, tvm.tensor.ComputeOp) and "pad" in data.op.tag: data_pad = data data = data_pad.op.input_tensors[0] args = [s, cfg, data_vec, conv_out, outs[0]] if data.dtype == 'uint8': kh, kw, _, _, _ = get_const_tuple(kernel.shape) if kh == 1 and kw == 1: conv2d_avx_1x1._schedule_conv_nhwc_pack_int8(*args) else: raise ValueError("Only support 1x1 kernel with " "schedule_conv2d_nhwc_pack.") else: raise ValueError("Not support this data type {} with " "schedule_conv2d_nhwc_pack. Only support int8".format(data.dtype)) scheduled_ops.append(op) traverse(output_op) return s
41.422642
99
0.620388
import re import tvm from tvm import autotvm from tvm.autotvm.task import get_config from tvm.autotvm.task.topi_integration import deserialize_args from ..nn.conv2d import _get_workload as _get_conv2d_workload from .. import generic, tag from ..generic import conv2d as conv2d_generic from ..util import get_const_tuple from ..nn.conv2d import conv2d_NCHWc_int8 from .. import nn from . import conv2d_avx_1x1, conv2d_avx_common def _get_default_config_int8(cfg, data, kernel, strides, padding, out_dtype, is_depthwise=False, layout='NCHW'): assert not is_depthwise, "Depthwise Int8 not supported" wkl = _get_conv2d_workload(data, kernel, strides, padding, out_dtype, layout) is_kernel_1x1 = wkl.hkernel == 1 and wkl.wkernel == 1 if is_kernel_1x1: conv2d_generic.fallback_schedule_cpu_1x1_int8( cfg, wkl, int32_lanes=16, num_int8_elements=4) else: conv2d_generic.fallback_schedule_cpu_common_int8( cfg, wkl, int32_lanes=16, num_int8_elements=4) def _is_int8_hw_support(data_dtype, kernel_dtype): is_dtype_support = data_dtype == 'uint8' and kernel_dtype == 'int8' llvm_intrin_fast_int8 = "llvm.x86.avx512.pmaddubs.w.512" llvm_id = tvm.codegen.llvm_lookup_intrinsic_id(llvm_intrin_fast_int8) is_llvm_support = llvm_id != 0 target = tvm.target.current_target() is_target_support = False for opt in target.options: if opt == '-mcpu=skylake-avx512': is_target_support = True return is_dtype_support and is_llvm_support and is_target_support def _create_tuning_space_int8(cfg, data, kernel, strides, padding, dilation, layout): dshape = get_const_tuple(data.shape) kshape = get_const_tuple(kernel.shape) pat = re.compile(r'NCHW.+(\d+)c') if layout == 'NCHW': n, ic, h, w = dshape oc, _, kh, kw = kshape elif layout == 'NHWC': n, h, w, ic = dshape kh, kw, oc, _ = kshape elif pat.match(layout) is not None: n, ic_chunk, h, w, ic_bn = dshape target = tvm.target.current_target(allow_none=False) oc_chunk, k_ic, kh, kw, k_ic_f, oc_bn, k_ic_s = kshape ic = ic_chunk * ic_bn assert ic == k_ic * k_ic_f * k_ic_s oc = oc_chunk*oc_bn else: raise ValueError("Not support this layout {} with " "schedule template.".format(layout)) is_kernel_1x1 = kh == 1 and kw == 1 ph, pw = padding if isinstance(padding, (tuple, list)) else (padding, padding) sh, sw = strides if isinstance(strides, (tuple, list)) else (strides, strides) oh = (h - kh + 2 * ph) // sh + 1 ow = (w - kw + 2 * pw) // sw + 1 cfg.define_split('tile_ic', ic, num_outputs=2, filter=lambda y: y.size[-1] % 4 == 0) cfg.define_split('tile_oc', oc, num_outputs=2, filter=lambda y: y.size[-1] % 16 == 0) cfg.define_split("tile_ow", ow, num_outputs=2, filter=lambda y: y.size[-1] <= 64) if is_kernel_1x1: cfg.define_knob("tile_oh", [1, 2] if oh > 1 else [1]) else: cfg.define_knob("unroll_kw", [True, False]) @autotvm.task.register("topi_x86_conv2d_NCHWc_int8") def _topi_nn_conv2d_NCHWc_int8(*args, **kwargs): assert not kwargs, "Do not support kwargs in template function call" args = deserialize_args(args) if len(args) == 7: data, kernel, strides, padding, dilation, origin_layout, dtype = args else: assert len(args) == 8 data, kernel, strides, padding, dilation, origin_layout, out_layout, dtype = args raw_data_shape = get_const_tuple(data.shape) raw_kernel_shape = get_const_tuple(kernel.shape) cfg = get_config() _create_tuning_space_int8(cfg, data, kernel, strides, padding, dilation, origin_layout) ic_bn, oc_bn, ow_bn = (cfg["tile_ic"].size[-1], cfg["tile_oc"].size[-1], cfg["tile_ow"].size[-1]) data_layout = "NCHW%dc" % ic_bn out_layout = "NCHW%dc" % oc_bn new_data_shape = (raw_data_shape[0], 
raw_data_shape[1] // ic_bn, raw_data_shape[2], raw_data_shape[3], ic_bn) n_elems = 4 new_kernel_shape = (raw_kernel_shape[0] // oc_bn, raw_kernel_shape[1] // ic_bn, raw_kernel_shape[2], raw_kernel_shape[3], ic_bn // n_elems, oc_bn, n_elems) new_data = tvm.placeholder(new_data_shape, data.dtype) new_kernel = tvm.placeholder(new_kernel_shape, kernel.dtype) C = _declaration_conv_NCHWc_int8(cfg, new_data, new_kernel, strides, padding, dilation, data_layout, out_layout, dtype) s = _schedule_conv2d_NCHWc_int8(cfg, [C]) return s, [new_data, new_kernel, C] @autotvm.register_topi_compute(conv2d_NCHWc_int8, 'cpu', 'direct') def _declaration_conv_NCHWc_int8(cfg, data, kernel, strides, padding, dilation, layout, out_layout, out_dtype): return nn.conv2d_NCHWc_int8_compute(data, kernel, strides, padding, dilation, layout, out_layout, out_dtype) @autotvm.register_topi_schedule(generic.schedule_conv2d_NCHWc_int8, 'cpu', ['direct']) def _schedule_conv2d_NCHWc_int8(cfg, outs): s = tvm.create_schedule([x.op for x in outs]) scheduled_ops = [] def traverse(op): if tag.is_broadcast(op.tag): if op not in s.outputs: s[op].compute_inline() for tensor in op.input_tensors: if isinstance(tensor.op, tvm.tensor.ComputeOp) and tensor.op not in scheduled_ops: traverse(tensor.op) if 'conv2d_NCHWc_int8' in op.tag: conv_out = op.output(0) kernel = conv_out.op.input_tensors[1] data_vec = conv_out.op.input_tensors[0] data = data_vec.op.input_tensors[0] \ if isinstance(data_vec.op, tvm.tensor.ComputeOp) and "pad" not in data_vec.op.tag \ else data_vec if isinstance(data.op, tvm.tensor.ComputeOp) and "pad" in data.op.tag: data_pad = data data = data_pad.op.input_tensors[0] args = [s, cfg, data_vec, conv_out, outs[0]] target = tvm.target.current_target(allow_none=False) _, _, kh, kw, _, _, _ = get_const_tuple(kernel.shape) if kh == 1 and kw == 1: conv2d_avx_1x1._schedule_conv_NCHWc_int8(*args) else: conv2d_avx_common._schedule_conv_NCHWc_int8(*args) scheduled_ops.append(op) traverse(outs[0].op) return s @autotvm.register_topi_schedule(generic.schedule_conv2d_nhwc_pack, 'cpu', ['direct']) def schedule_conv2d_nhwc_pack(cfg, outs): s = tvm.create_schedule([x.op for x in outs]) output_op = outs[0].op scheduled_ops = [] def traverse(op): if tag.is_broadcast(op.tag): if op not in s.outputs: s[op].compute_inline() else: if len(op.axis) == 4: n, h, w, c = op.axis fused = s[op].fuse(n, h, w) s[op].parallel(fused) s[op].vectorize(c) for tensor in op.input_tensors: if isinstance(tensor.op, tvm.tensor.ComputeOp) and tensor.op not in scheduled_ops: traverse(tensor.op) if 'conv2d_nhwc_pack_int8' in op.tag: conv_out = op.output(0) kernel = conv_out.op.input_tensors[1] data_vec = conv_out.op.input_tensors[0] data = data_vec.op.input_tensors[0] \ if isinstance(data_vec.op, tvm.tensor.ComputeOp) and "pad" not in data_vec.op.tag \ else data_vec if isinstance(data.op, tvm.tensor.ComputeOp) and "pad" in data.op.tag: data_pad = data data = data_pad.op.input_tensors[0] args = [s, cfg, data_vec, conv_out, outs[0]] if data.dtype == 'uint8': kh, kw, _, _, _ = get_const_tuple(kernel.shape) if kh == 1 and kw == 1: conv2d_avx_1x1._schedule_conv_nhwc_pack_int8(*args) else: raise ValueError("Only support 1x1 kernel with " "schedule_conv2d_nhwc_pack.") else: raise ValueError("Not support this data type {} with " "schedule_conv2d_nhwc_pack. Only support int8".format(data.dtype)) scheduled_ops.append(op) traverse(output_op) return s
true
true
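The tuning space built in _create_tuning_space_int8 above constrains the innermost input-channel tile to a multiple of 4 and the innermost output-channel tile to a multiple of 16, matching an AVX-512 int8 dot product that accumulates 4 int8 elements into each of 16 int32 lanes. A minimal sketch, independent of TVM, of what such split filters admit; two_way_splits and the channel counts are illustrative and not part of the file or of TVM's API:

# Illustrative only: enumerate the (outer, inner) factor pairs that a
# cfg.define_split-style filter on the inner factor would keep.
def two_way_splits(extent, inner_ok):
    pairs = []
    for inner in range(1, extent + 1):
        if extent % inner == 0 and inner_ok(inner):
            pairs.append((extent // inner, inner))
    return pairs

if __name__ == "__main__":
    ic, oc = 128, 256  # hypothetical channel counts, not taken from the record above
    # tile_ic: inner factor must be a multiple of 4 (num_int8_elements)
    print(two_way_splits(ic, lambda x: x % 4 == 0))   # [(32, 4), (16, 8), (8, 16), (4, 32), (2, 64), (1, 128)]
    # tile_oc: inner factor must be a multiple of 16 (int32_lanes)
    print(two_way_splits(oc, lambda x: x % 16 == 0))  # [(16, 16), (8, 32), (4, 64), (2, 128), (1, 256)]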
f701112398e218e571982191d312507cf5df31ed
3,344
py
Python
scripts/us_census/acs5yr/subject_tables/s2201/process_test.py
rpatil524/data
9e76c7f22a75ad4e52522444a080ed3f5c6da7dd
[ "Apache-2.0" ]
null
null
null
scripts/us_census/acs5yr/subject_tables/s2201/process_test.py
rpatil524/data
9e76c7f22a75ad4e52522444a080ed3f5c6da7dd
[ "Apache-2.0" ]
null
null
null
scripts/us_census/acs5yr/subject_tables/s2201/process_test.py
rpatil524/data
9e76c7f22a75ad4e52522444a080ed3f5c6da7dd
[ "Apache-2.0" ]
null
null
null
"""Tests for py. for S2201""" import csv import json import os import tempfile import sys import unittest _CODEDIR = os.path.dirname(os.path.realpath(__file__)) sys.path.insert(1, os.path.join(_CODEDIR, '.')) from .process import * _FEATURES = os.path.join(_CODEDIR, 'features.json') _STAT_VAR_LIST = os.path.join(_CODEDIR, 'stat_vars.csv') _TEST_DATA = os.path.join(_CODEDIR, 'testdata') _EXPECTED_TMCF = os.path.join(_CODEDIR, 'output.tmcf') class ProcessTest(unittest.TestCase): def test_convert_column_to_stat_var(self): f = open(_FEATURES) features = json.load(f) f.close() self.assertEqual( convert_column_to_stat_var( 'Estimate!!Households receiving food stamps/SNAP!!Households', features), 'Count_Household_WithFoodStampsInThePast12Months') self.assertEqual( convert_column_to_stat_var( 'Margin of Error!!' + 'Households receiving food stamps/SNAP!!Households!!' + 'No children under 18 years!!Other family:!!' + 'Male householder, no spouse present', features), 'MarginOfError_Count_Household_WithFoodStampsInThePast12Months_' + 'WithoutChildrenUnder18_SingleFatherFamilyHousehold') self.assertEqual( convert_column_to_stat_var( 'Estimate!!Households receiving food stamps/SNAP!!Households!!' + 'HOUSEHOLD INCOME IN THE PAST 12 MONTHS' + '(IN 2019 INFLATION-ADJUSTED DOLLARS)!!Median income (dollars)', features), 'Median_Income_Household_WithFoodStampsInThePast12Months') def test_create_csv(self): f = open(_FEATURES) features = json.load(f) f.close() f = open(_STAT_VAR_LIST) stat_vars = f.read().splitlines() f.close() with tempfile.TemporaryDirectory() as tmp_dir: test_csv = os.path.join(tmp_dir, 'test_csv.csv') create_csv(test_csv, stat_vars) for year in range(2010, 2020): filename = f'testACSST5Y{year}.csv' with open(os.path.join(_TEST_DATA, filename)) as f: reader = csv.DictReader(f) write_csv(filename, reader, test_csv, features, stat_vars) with open(test_csv) as f_result: test_result = f_result.read() with open(os.path.join(_TEST_DATA, 'expected.csv')) as f_test: expected = f_test.read() self.assertEqual(test_result, expected) os.remove(test_csv) def test_create_tmcf(self): f = open(_FEATURES) features = json.load(f) f.close() f = open(_STAT_VAR_LIST) stat_vars = f.read().splitlines() f.close() with tempfile.TemporaryDirectory() as tmp_dir: test_tmcf = os.path.join(tmp_dir, 'test_tmcf.tmcf') create_tmcf(test_tmcf, features, stat_vars) with open(test_tmcf) as f_result: test_result = f_result.read() with open(_EXPECTED_TMCF) as f_test: expected = f_test.read() self.assertEqual(test_result, expected) os.remove(test_tmcf) if __name__ == '__main__': unittest.main()
37.573034
80
0.60945
import csv
import json
import os
import tempfile
import sys
import unittest

_CODEDIR = os.path.dirname(os.path.realpath(__file__))
sys.path.insert(1, os.path.join(_CODEDIR, '.'))
from .process import *

_FEATURES = os.path.join(_CODEDIR, 'features.json')
_STAT_VAR_LIST = os.path.join(_CODEDIR, 'stat_vars.csv')
_TEST_DATA = os.path.join(_CODEDIR, 'testdata')
_EXPECTED_TMCF = os.path.join(_CODEDIR, 'output.tmcf')


class ProcessTest(unittest.TestCase):

    def test_convert_column_to_stat_var(self):
        f = open(_FEATURES)
        features = json.load(f)
        f.close()
        self.assertEqual(
            convert_column_to_stat_var(
                'Estimate!!Households receiving food stamps/SNAP!!Households',
                features),
            'Count_Household_WithFoodStampsInThePast12Months')
        self.assertEqual(
            convert_column_to_stat_var(
                'Margin of Error!!' +
                'Households receiving food stamps/SNAP!!Households!!' +
                'No children under 18 years!!Other family:!!' +
                'Male householder, no spouse present', features),
            'MarginOfError_Count_Household_WithFoodStampsInThePast12Months_' +
            'WithoutChildrenUnder18_SingleFatherFamilyHousehold')
        self.assertEqual(
            convert_column_to_stat_var(
                'Estimate!!Households receiving food stamps/SNAP!!Households!!' +
                'HOUSEHOLD INCOME IN THE PAST 12 MONTHS' +
                '(IN 2019 INFLATION-ADJUSTED DOLLARS)!!Median income (dollars)',
                features),
            'Median_Income_Household_WithFoodStampsInThePast12Months')

    def test_create_csv(self):
        f = open(_FEATURES)
        features = json.load(f)
        f.close()
        f = open(_STAT_VAR_LIST)
        stat_vars = f.read().splitlines()
        f.close()
        with tempfile.TemporaryDirectory() as tmp_dir:
            test_csv = os.path.join(tmp_dir, 'test_csv.csv')
            create_csv(test_csv, stat_vars)
            for year in range(2010, 2020):
                filename = f'testACSST5Y{year}.csv'
                with open(os.path.join(_TEST_DATA, filename)) as f:
                    reader = csv.DictReader(f)
                    write_csv(filename, reader, test_csv, features, stat_vars)
            with open(test_csv) as f_result:
                test_result = f_result.read()
                with open(os.path.join(_TEST_DATA, 'expected.csv')) as f_test:
                    expected = f_test.read()
                    self.assertEqual(test_result, expected)
            os.remove(test_csv)

    def test_create_tmcf(self):
        f = open(_FEATURES)
        features = json.load(f)
        f.close()
        f = open(_STAT_VAR_LIST)
        stat_vars = f.read().splitlines()
        f.close()
        with tempfile.TemporaryDirectory() as tmp_dir:
            test_tmcf = os.path.join(tmp_dir, 'test_tmcf.tmcf')
            create_tmcf(test_tmcf, features, stat_vars)
            with open(test_tmcf) as f_result:
                test_result = f_result.read()
                with open(_EXPECTED_TMCF) as f_test:
                    expected = f_test.read()
                    self.assertEqual(test_result, expected)
            os.remove(test_tmcf)


if __name__ == '__main__':
    unittest.main()
true
true
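The tests above follow a golden-file pattern: generated CSV/TMCF output is written into a tempfile.TemporaryDirectory and compared byte-for-byte against files checked into testdata/. A self-contained sketch of that pattern; write_report and the literal expected string are hypothetical stand-ins, not part of the S2201 module:

import os
import tempfile
import unittest


def write_report(path):
    # hypothetical stand-in for create_csv/write_csv in the module under test
    with open(path, 'w') as f:
        f.write('stat_var,value\nCount_Household,42\n')


class GoldenFileTest(unittest.TestCase):

    def test_write_report(self):
        with tempfile.TemporaryDirectory() as tmp_dir:
            out_csv = os.path.join(tmp_dir, 'out.csv')
            write_report(out_csv)
            with open(out_csv) as f_result:
                result = f_result.read()
        # the S2201 tests read testdata/expected.csv; a literal keeps this sketch self-contained
        self.assertEqual(result, 'stat_var,value\nCount_Household,42\n')


if __name__ == '__main__':
    unittest.main()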
f7011160286db7888ac43e640bc9ad5f0b9698f2
661
py
Python
endpoints/get_news/get_news_validator.py
Maurck/fisinius
15b05c5f4dfcef1adb96e3d862ceab0cb557eae2
[ "Apache-2.0" ]
null
null
null
endpoints/get_news/get_news_validator.py
Maurck/fisinius
15b05c5f4dfcef1adb96e3d862ceab0cb557eae2
[ "Apache-2.0" ]
null
null
null
endpoints/get_news/get_news_validator.py
Maurck/fisinius
15b05c5f4dfcef1adb96e3d862ceab0cb557eae2
[ "Apache-2.0" ]
null
null
null
from utils.utils import validate_parameters

get_news_query_schema = {
    "from": {
        "type": "integer",
        'coerce': int,
        "min": 0,
        "max": 10000,
        "required": False,
        "default": 0
    },
    "limit": {
        "type": "integer",
        'coerce': int,
        "min": 0,
        "max": 10000,
        "required": False,
        "default": 0
    },
    "category": {
        "type": "string",
        "required": False
    }
}


class GetNewsValidator:

    def __call__(self, request):
        body_validation_errors = validate_parameters(request.args.copy(), get_news_query_schema)
        return body_validation_errors
22.033333
96
0.535552
from utils.utils import validate_parameters

get_news_query_schema = {
    "from": {
        "type": "integer",
        'coerce': int,
        "min": 0,
        "max": 10000,
        "required": False,
        "default": 0
    },
    "limit": {
        "type": "integer",
        'coerce': int,
        "min": 0,
        "max": 10000,
        "required": False,
        "default": 0
    },
    "category": {
        "type": "string",
        "required": False
    }
}


class GetNewsValidator:

    def __call__(self, request):
        body_validation_errors = validate_parameters(request.args.copy(), get_news_query_schema)
        return body_validation_errors
true
true
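get_news_query_schema above is handed to an external validate_parameters helper that is not shown in this record; its rule keys ('type', 'coerce', 'min', 'max', 'required', 'default') suggest a Cerberus-style coerce/validate/default pass. A dependency-free sketch of how such a schema could be applied to raw query arguments; apply_query_schema is illustrative and only assumes behaviour consistent with those rule names:

def apply_query_schema(params, schema):
    # Illustrative only: coerce values, check bounds, fill defaults, collect errors.
    cleaned, errors = {}, {}
    for name, rules in schema.items():
        if name in params:
            value = params[name]
            coerce = rules.get('coerce')
            if coerce is not None:
                try:
                    value = coerce(value)
                except (TypeError, ValueError):
                    errors[name] = 'could not coerce value'
                    continue
            if 'min' in rules and value < rules['min']:
                errors[name] = 'below minimum'
            elif 'max' in rules and value > rules['max']:
                errors[name] = 'above maximum'
            else:
                cleaned[name] = value
        elif rules.get('required'):
            errors[name] = 'missing required parameter'
        elif 'default' in rules:
            cleaned[name] = rules['default']
    return cleaned, errors


if __name__ == '__main__':
    schema = {
        'from': {'coerce': int, 'min': 0, 'max': 10000, 'default': 0},
        'limit': {'coerce': int, 'min': 0, 'max': 10000, 'default': 0},
        'category': {'required': False},
    }
    print(apply_query_schema({'from': '25', 'category': 'tech'}, schema))
    # -> ({'from': 25, 'limit': 0, 'category': 'tech'}, {})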
f7011278b1528d3d413eabf108ce7a8df5d2333a
1,543
py
Python
dataset/cut_chime.py
dzungcamlang/noise_adversarial_tacotron
7a7fda49eb8bf82f5139743d55639d48ff204e9e
[ "MIT" ]
11
2019-11-25T12:16:39.000Z
2020-11-05T15:47:13.000Z
dataset/cut_chime.py
yqlihust/noise_adversarial_tacotron
7a7fda49eb8bf82f5139743d55639d48ff204e9e
[ "MIT" ]
null
null
null
dataset/cut_chime.py
yqlihust/noise_adversarial_tacotron
7a7fda49eb8bf82f5139743d55639d48ff204e9e
[ "MIT" ]
3
2021-04-28T04:59:04.000Z
2022-01-05T12:22:03.000Z
import hp
from pathlib import Path
import numpy as np
from tqdm import tqdm
import librosa
import torch
import librosa.filters
import numpy as np
import scipy
from random import randint
from os import makedirs


def load_wav(path, sample_rate):
    return librosa.core.load(path, sr=sample_rate)[0]


def save_wav(wav, path, sample_rate):
    wav *= 32767 / max(0.01, np.max(np.abs(wav)))
    scipy.io.wavfile.write(path, sample_rate, wav.astype(np.int16))


def get_segments(source, length, count):
    begins = []
    l = len(source)
    for _ in range(count):
        begins.append(randint(0, l - length - 1))
    segments = []
    for begin in begins:
        segments.append(source[begin: begin + length])
    return segments


def process_chime(
        source=hp.whole_chime_path,
        target=hp.part_chime_path,
        sr=16000,
        duration=30,
        count=10
):
    """
    Randomly picking segments from CHiME dataset, since full dataset is not necessary in our case.
    :param source:
    :param target:
    :param sr:
    :param duration:
    :param count:
    :return:
    """
    makedirs(str(target), exist_ok=True)
    for path in tqdm(source.glob("*.wav")):
        wave = load_wav(path, sr)
        if len(wave) < sr * 30:
            continue
        waves = get_segments(wave, duration * sr, count)
        for i, wave in enumerate(waves, 1):
            save_wav(wave, str(target / f"{path.stem}_{i}.wav"), sr)


if __name__ == '__main__':
    print("Beginning segmenting CHiME4 noises.")
    process_chime()
    print("Processing Finished")
24.492063
98
0.659106
import hp
from pathlib import Path
import numpy as np
from tqdm import tqdm
import librosa
import torch
import librosa.filters
import numpy as np
import scipy
from random import randint
from os import makedirs


def load_wav(path, sample_rate):
    return librosa.core.load(path, sr=sample_rate)[0]


def save_wav(wav, path, sample_rate):
    wav *= 32767 / max(0.01, np.max(np.abs(wav)))
    scipy.io.wavfile.write(path, sample_rate, wav.astype(np.int16))


def get_segments(source, length, count):
    begins = []
    l = len(source)
    for _ in range(count):
        begins.append(randint(0, l - length - 1))
    segments = []
    for begin in begins:
        segments.append(source[begin: begin + length])
    return segments


def process_chime(
        source=hp.whole_chime_path,
        target=hp.part_chime_path,
        sr=16000,
        duration=30,
        count=10
):
    makedirs(str(target), exist_ok=True)
    for path in tqdm(source.glob("*.wav")):
        wave = load_wav(path, sr)
        if len(wave) < sr * 30:
            continue
        waves = get_segments(wave, duration * sr, count)
        for i, wave in enumerate(waves, 1):
            save_wav(wave, str(target / f"{path.stem}_{i}.wav"), sr)


if __name__ == '__main__':
    print("Beginning segmenting CHiME4 noises.")
    process_chime()
    print("Processing Finished")
true
true
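get_segments in cut_chime.py above simply takes `count` random fixed-length crops from a longer 1-D signal, and the same logic works on any array. A small self-contained demo using a synthetic signal in place of a real CHiME recording (the random array and the 30-second/10-crop settings are illustrative):

import numpy as np
from random import randint


def get_segments(source, length, count):
    # same cropping logic as cut_chime.py, restated so the demo runs on its own
    begins = [randint(0, len(source) - length - 1) for _ in range(count)]
    return [source[b: b + length] for b in begins]


if __name__ == '__main__':
    sr = 16000
    fake_noise = np.random.randn(60 * sr).astype(np.float32)  # 60 s stand-in for a CHiME wav
    chunks = get_segments(fake_noise, 30 * sr, 10)            # ten 30-second crops
    print(len(chunks), chunks[0].shape)                       # -> 10 (480000,)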
f70112c4c7f82558c05500b63c0f7f128ba47bdd
282
py
Python
testsuite/render-material-layer/run.py
LongerVision/OpenShadingLanguage
30d2a4a089c5c9d521b27519329c205763dfe483
[ "BSD-3-Clause" ]
1,105
2015-01-02T20:47:19.000Z
2021-01-25T13:20:56.000Z
testsuite/render-material-layer/run.py
LongerVision/OpenShadingLanguage
30d2a4a089c5c9d521b27519329c205763dfe483
[ "BSD-3-Clause" ]
696
2015-01-07T23:42:08.000Z
2021-01-25T03:55:08.000Z
testsuite/render-material-layer/run.py
LongerVision/OpenShadingLanguage
30d2a4a089c5c9d521b27519329c205763dfe483
[ "BSD-3-Clause" ]
248
2015-01-05T13:41:28.000Z
2021-01-24T23:29:55.000Z
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage

#!/usr/bin/python

outputs = [ "out.exr" ]
command = testrender("-r 320 240 -aa 4 material-layer.xml out.exr")
35.25
67
0.762411
outputs = [ "out.exr" ] command = testrender("-r 320 240 -aa 4 material-layer.xml out.exr")
true
true
f7011338d453f21f4330747e8cafd3b46da5df53
28,469
py
Python
openaset/dpupr/admin.py
muntaza/Open-Aset
f5eb6770a9f7184e3860a18cd655b35b248a9dd5
[ "BSD-2-Clause" ]
null
null
null
openaset/dpupr/admin.py
muntaza/Open-Aset
f5eb6770a9f7184e3860a18cd655b35b248a9dd5
[ "BSD-2-Clause" ]
null
null
null
openaset/dpupr/admin.py
muntaza/Open-Aset
f5eb6770a9f7184e3860a18cd655b35b248a9dd5
[ "BSD-2-Clause" ]
2
2019-02-18T05:25:23.000Z
2021-02-01T16:45:23.000Z
### $Id: admin.py,v 1.5 2017/12/18 09:12:51 muntaza Exp $ from django.contrib import admin from umum.models import Provinsi, Kabupaten, LokasiBidang, SKPD, SUBSKPD, KodeBarang, HakTanah, SatuanBarang, KeadaanBarang, SKPenghapusan, MutasiBerkurang, JenisPemanfaatan, AsalUsul, Tahun, GolonganBarang, Tanah, KontrakTanah, PenghapusanTanah, TanahPenghapusan, PemanfaatanTanah, TanahPemanfaatan, HargaTanah, TahunBerkurangUsulHapusTanah, TanahUsulHapus #### Tanah from umum.models import TanahDPUPR, KontrakTanahDPUPR, HargaTanahDPUPR, TanahUsulHapusDPUPR, TahunBerkurangUsulHapusTanahDPUPR from umum.models import TanahPenghapusanDPUPR, TahunBerkurangTanahDPUPR, PenghapusanTanahDPUPR from umum.models import SKPDAsalTanahDPUPR, SKPDTujuanTanahDPUPR, FotoTanahDPUPR from umum.admin import HargaTanahInline, TanahAdmin, KontrakTanahAdmin, HargaTanahAdmin, TahunBerkurangUsulHapusTanahInline, TanahUsulHapusAdmin from umum.admin import TahunBerkurangTanahInline, PenghapusanTanahInline, TanahPenghapusanAdmin from umum.admin import SKPDAsalTanahInline, SKPDTujuanTanahInline, FotoTanahInline from umum.admin import GedungBangunanInline #### Gedung Bangunan from gedungbangunan.models import StatusTingkat, StatusBeton, KontrakGedungBangunan, HargaGedungBangunan, GedungBangunan, PenghapusanGedungBangunan, PemanfaatanGedungBangunan, TahunBerkurangGedungBangunan, Ruangan, TahunBerkurangUsulHapusGedung from gedungbangunan.models import GedungBangunanPemanfaatan, GedungBangunanPenghapusan, GedungBangunanRuangan, GedungBangunanUsulHapus from gedungbangunan.models import GedungBangunanDPUPR, KontrakGedungBangunanDPUPR, HargaGedungBangunanDPUPR, GedungBangunanRuanganDPUPR, GedungBangunanUsulHapusDPUPR, TahunBerkurangUsulHapusGedungDPUPR from gedungbangunan.models import GedungBangunanPenghapusanDPUPR, TahunBerkurangGedungBangunanDPUPR, PenghapusanGedungBangunanDPUPR from gedungbangunan.models import SKPDAsalGedungBangunanDPUPR, SKPDTujuanGedungBangunanDPUPR, FotoGedungBangunanDPUPR from gedungbangunan.admin import HargaGedungBangunanInline, GedungBangunanAdmin, KontrakGedungBangunanAdmin, HargaGedungBangunanAdmin, RuanganInline, GedungBangunanRuanganAdmin, KDPGedungBangunanAdmin, TahunBerkurangUsulHapusGedungInline, GedungBangunanUsulHapusAdmin from gedungbangunan.admin import TahunBerkurangGedungBangunanInline, PenghapusanGedungBangunanInline, GedungBangunanPenghapusanAdmin from gedungbangunan.admin import SKPDAsalGedungBangunanInline, SKPDTujuanGedungBangunanInline, FotoGedungBangunanInline #### Peralatan Mesin from peralatanmesin.models import KontrakPeralatanMesin, HargaPeralatanMesin, PeralatanMesin, PenghapusanPeralatanMesin, PemanfaatanPeralatanMesin, TahunBerkurangPeralatanMesin, TahunBerkurangUsulHapusPeralatanMesin #untuk menampung inline from peralatanmesin.models import PeralatanMesinPemanfaatan, PeralatanMesinPenghapusan, PeralatanMesinUsulHapus from peralatanmesin.models import PeralatanMesinDPUPR, KontrakPeralatanMesinDPUPR, HargaPeralatanMesinDPUPR, PeralatanMesinUsulHapusDPUPR, TahunBerkurangUsulHapusPeralatanMesinDPUPR from peralatanmesin.models import PeralatanMesinPenghapusanDPUPR, TahunBerkurangPeralatanMesinDPUPR, PenghapusanPeralatanMesinDPUPR from peralatanmesin.models import SKPDAsalPeralatanMesinDPUPR, SKPDTujuanPeralatanMesinDPUPR, FotoPeralatanMesinDPUPR from peralatanmesin.admin import HargaPeralatanMesinInline, PeralatanMesinAdmin, KontrakPeralatanMesinAdmin, HargaPeralatanMesinAdmin, TahunBerkurangUsulHapusPeralatanMesinInline, PeralatanMesinUsulHapusAdmin from peralatanmesin.admin import 
TahunBerkurangPeralatanMesinInline, PenghapusanPeralatanMesinInline, PeralatanMesinPenghapusanAdmin from peralatanmesin.admin import SKPDAsalPeralatanMesinInline, SKPDTujuanPeralatanMesinInline, FotoPeralatanMesinInline #### Class Tanah class TahunBerkurangTanahDPUPRInline(TahunBerkurangTanahInline): model = TahunBerkurangTanahDPUPR class PenghapusanTanahDPUPRInline(PenghapusanTanahInline): model = PenghapusanTanahDPUPR class SKPDAsalTanahDPUPRInline(SKPDAsalTanahInline): model = SKPDAsalTanahDPUPR class SKPDTujuanTanahDPUPRInline(SKPDTujuanTanahInline): model = SKPDTujuanTanahDPUPR class FotoTanahDPUPRInline(FotoTanahInline): model = FotoTanahDPUPR class GedungBangunanDPUPRInline(GedungBangunanInline): model = GedungBangunanDPUPR class HargaTanahDPUPRInline(HargaTanahInline): model = HargaTanahDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak": kwargs["queryset"] = KontrakTanah.objects.filter(id_skpd__exact=3) return super(HargaTanahDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusTanahDPUPRInline(TahunBerkurangUsulHapusTanahInline): model = TahunBerkurangUsulHapusTanahDPUPR class TanahDPUPRAdmin(TanahAdmin): inlines = [HargaTanahDPUPRInline, SKPDAsalTanahDPUPRInline, FotoTanahDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(TanahDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5) class TanahUsulHapusDPUPRAdmin(TanahUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusTanahDPUPRInline, SKPDAsalTanahDPUPRInline, FotoTanahDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=3) class KontrakTanahDPUPRAdmin(KontrakTanahAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakTanahDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaTanahDPUPRAdmin(HargaTanahAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) tanah_qs = Tanah.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_tanah__in=tanah_qs) class TanahPenghapusanDPUPRAdmin(TanahPenghapusanAdmin): inlines = [PenghapusanTanahDPUPRInline, TahunBerkurangTanahDPUPRInline, SKPDAsalTanahDPUPRInline, SKPDTujuanTanahDPUPRInline, FotoTanahDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) ### Register Tanah DPUPR admin.site.register(TanahDPUPR, TanahDPUPRAdmin) admin.site.register(TanahUsulHapusDPUPR, TanahUsulHapusDPUPRAdmin) admin.site.register(KontrakTanahDPUPR, KontrakTanahDPUPRAdmin) admin.site.register(HargaTanahDPUPR, HargaTanahDPUPRAdmin) admin.site.register(TanahPenghapusanDPUPR, TanahPenghapusanDPUPRAdmin) from gedungbangunan.models import KDPGedungBangunanDPUPR #### Class Gedung dan Bangunan class 
TahunBerkurangGedungBangunanDPUPRInline(TahunBerkurangGedungBangunanInline): model = TahunBerkurangGedungBangunanDPUPR class PenghapusanGedungBangunanDPUPRInline(PenghapusanGedungBangunanInline): model = PenghapusanGedungBangunanDPUPR class SKPDAsalGedungBangunanDPUPRInline(SKPDAsalGedungBangunanInline): model = SKPDAsalGedungBangunanDPUPR class SKPDTujuanGedungBangunanDPUPRInline(SKPDTujuanGedungBangunanInline): model = SKPDTujuanGedungBangunanDPUPR class FotoGedungBangunanDPUPRInline(FotoGedungBangunanInline): model = FotoGedungBangunanDPUPR class HargaGedungBangunanDPUPRInline(HargaGedungBangunanInline): model = HargaGedungBangunanDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_gedung_bangunan": kwargs["queryset"] = KontrakGedungBangunan.objects.filter(id_skpd__exact=3) return super(HargaGedungBangunanDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusGedungDPUPRInline(TahunBerkurangUsulHapusGedungInline): model = TahunBerkurangUsulHapusGedungDPUPR class GedungBangunanDPUPRAdmin(GedungBangunanAdmin): inlines = [HargaGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(GedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=5) class KDPGedungBangunanDPUPRAdmin(KDPGedungBangunanAdmin): inlines = [HargaGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(KDPGedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=6).filter(id_mutasi_berkurang__exact=5) class GedungBangunanRuanganDPUPRAdmin(GedungBangunanRuanganAdmin): def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=5) class GedungBangunanUsulHapusDPUPRAdmin(GedungBangunanUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusGedungDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=3) class KontrakGedungBangunanDPUPRAdmin(KontrakGedungBangunanAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakGedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaGedungBangunanDPUPRAdmin(HargaGedungBangunanAdmin): def get_queryset(self, request): sub_skpd_qs = 
SUBSKPD.objects.filter(id_skpd__exact=3) gedung_bangunan_qs = GedungBangunan.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_gedung_bangunan__in=gedung_bangunan_qs) class GedungBangunanPenghapusanDPUPRAdmin(GedungBangunanPenghapusanAdmin): inlines = [PenghapusanGedungBangunanDPUPRInline, TahunBerkurangGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, SKPDTujuanGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) ###Register GedungBangunan DPUPR admin.site.register(GedungBangunanDPUPR, GedungBangunanDPUPRAdmin) admin.site.register(KDPGedungBangunanDPUPR, KDPGedungBangunanDPUPRAdmin) admin.site.register(GedungBangunanRuanganDPUPR, GedungBangunanRuanganDPUPRAdmin) admin.site.register(GedungBangunanUsulHapusDPUPR, GedungBangunanUsulHapusDPUPRAdmin) admin.site.register(KontrakGedungBangunanDPUPR, KontrakGedungBangunanDPUPRAdmin) admin.site.register(HargaGedungBangunanDPUPR, HargaGedungBangunanDPUPRAdmin) admin.site.register(GedungBangunanPenghapusanDPUPR, GedungBangunanPenghapusanDPUPRAdmin) #### Class Peralatan Mesin class TahunBerkurangPeralatanMesinDPUPRInline(TahunBerkurangPeralatanMesinInline): model = TahunBerkurangPeralatanMesinDPUPR class PenghapusanPeralatanMesinDPUPRInline(PenghapusanPeralatanMesinInline): model = PenghapusanPeralatanMesinDPUPR class SKPDAsalPeralatanMesinDPUPRInline(SKPDAsalPeralatanMesinInline): model = SKPDAsalPeralatanMesinDPUPR class SKPDTujuanPeralatanMesinDPUPRInline(SKPDTujuanPeralatanMesinInline): model = SKPDTujuanPeralatanMesinDPUPR class FotoPeralatanMesinDPUPRInline(FotoPeralatanMesinInline): model = FotoPeralatanMesinDPUPR class HargaPeralatanMesinDPUPRInline(HargaPeralatanMesinInline): model = HargaPeralatanMesinDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_peralatan_mesin": kwargs["queryset"] = KontrakPeralatanMesin.objects.filter(id_skpd__exact=3) return super(HargaPeralatanMesinDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusPeralatanMesinDPUPRInline(TahunBerkurangUsulHapusPeralatanMesinInline): model = TahunBerkurangUsulHapusPeralatanMesinDPUPR class PeralatanMesinDPUPRAdmin(PeralatanMesinAdmin): inlines = [HargaPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) if db_field.name == "id_ruangan": kwargs["queryset"] = Ruangan.objects.filter(id_gedung_bangunan__id_sub_skpd__id_skpd__exact=3) return super(PeralatanMesinDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5) class PeralatanMesinUsulHapusDPUPRAdmin(PeralatanMesinUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=3) class KontrakPeralatanMesinDPUPRAdmin(KontrakPeralatanMesinAdmin): def formfield_for_foreignkey(self, 
db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakPeralatanMesinDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaPeralatanMesinDPUPRAdmin(HargaPeralatanMesinAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) peralatan_mesin_qs = PeralatanMesin.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_peralatan_mesin__in=peralatan_mesin_qs) class PeralatanMesinPenghapusanDPUPRAdmin(PeralatanMesinPenghapusanAdmin): inlines = [PenghapusanPeralatanMesinDPUPRInline, TahunBerkurangPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, SKPDTujuanPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) ###Register PeralatanMesin DPUPR admin.site.register(PeralatanMesinDPUPR, PeralatanMesinDPUPRAdmin) admin.site.register(PeralatanMesinUsulHapusDPUPR, PeralatanMesinUsulHapusDPUPRAdmin) admin.site.register(KontrakPeralatanMesinDPUPR, KontrakPeralatanMesinDPUPRAdmin) admin.site.register(HargaPeralatanMesinDPUPR, HargaPeralatanMesinDPUPRAdmin) admin.site.register(PeralatanMesinPenghapusanDPUPR, PeralatanMesinPenghapusanDPUPRAdmin) #### Jalan, Irigasi, dan Jaringan from jalanirigasijaringan.models import KontrakJalanIrigasiJaringan, HargaJalanIrigasiJaringan, JalanIrigasiJaringan, PenghapusanJalanIrigasiJaringan, PemanfaatanJalanIrigasiJaringan, TahunBerkurangJalanIrigasiJaringan, TahunBerkurangUsulHapusJalanIrigasiJaringan from jalanirigasijaringan.models import JalanIrigasiJaringanPemanfaatan, JalanIrigasiJaringanPenghapusan, JalanIrigasiJaringanUsulHapus from jalanirigasijaringan.models import JalanIrigasiJaringanDPUPR, KontrakJalanIrigasiJaringanDPUPR, HargaJalanIrigasiJaringanDPUPR, KDPJalanIrigasiJaringanDPUPR, JalanIrigasiJaringanUsulHapusDPUPR, TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPR from jalanirigasijaringan.models import JalanIrigasiJaringanPenghapusanDPUPR, TahunBerkurangJalanIrigasiJaringanDPUPR, PenghapusanJalanIrigasiJaringanDPUPR from jalanirigasijaringan.models import SKPDAsalJalanIrigasiJaringanDPUPR, SKPDTujuanJalanIrigasiJaringanDPUPR, FotoJalanIrigasiJaringanDPUPR from jalanirigasijaringan.admin import HargaJalanIrigasiJaringanInline, JalanIrigasiJaringanAdmin, KontrakJalanIrigasiJaringanAdmin, HargaJalanIrigasiJaringanAdmin, KDPJalanIrigasiJaringanAdmin, TahunBerkurangUsulHapusJalanIrigasiJaringanInline, JalanIrigasiJaringanUsulHapusAdmin from jalanirigasijaringan.admin import TahunBerkurangJalanIrigasiJaringanInline, PenghapusanJalanIrigasiJaringanInline, JalanIrigasiJaringanPenghapusanAdmin from jalanirigasijaringan.admin import SKPDAsalJalanIrigasiJaringanInline, SKPDTujuanJalanIrigasiJaringanInline, FotoJalanIrigasiJaringanInline #### Class Jalan, Irigasi dan Jaringan class TahunBerkurangJalanIrigasiJaringanDPUPRInline(TahunBerkurangJalanIrigasiJaringanInline): model = TahunBerkurangJalanIrigasiJaringanDPUPR class PenghapusanJalanIrigasiJaringanDPUPRInline(PenghapusanJalanIrigasiJaringanInline): model = PenghapusanJalanIrigasiJaringanDPUPR class SKPDAsalJalanIrigasiJaringanDPUPRInline(SKPDAsalJalanIrigasiJaringanInline): model = SKPDAsalJalanIrigasiJaringanDPUPR class 
SKPDTujuanJalanIrigasiJaringanDPUPRInline(SKPDTujuanJalanIrigasiJaringanInline): model = SKPDTujuanJalanIrigasiJaringanDPUPR class FotoJalanIrigasiJaringanDPUPRInline(FotoJalanIrigasiJaringanInline): model = FotoJalanIrigasiJaringanDPUPR class HargaJalanIrigasiJaringanDPUPRInline(HargaJalanIrigasiJaringanInline): model = HargaJalanIrigasiJaringanDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_jalan_irigasi_jaringan": kwargs["queryset"] = KontrakJalanIrigasiJaringan.objects.filter(id_skpd__exact=3) return super(HargaJalanIrigasiJaringanDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPRInline(TahunBerkurangUsulHapusJalanIrigasiJaringanInline): model = TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPR class JalanIrigasiJaringanDPUPRAdmin(JalanIrigasiJaringanAdmin): inlines = [HargaJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(JalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=4).filter(id_mutasi_berkurang__exact=5) class JalanIrigasiJaringanUsulHapusDPUPRAdmin(JalanIrigasiJaringanUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=4).filter(id_mutasi_berkurang__exact=3) class KDPJalanIrigasiJaringanDPUPRAdmin(KDPJalanIrigasiJaringanAdmin): inlines = [HargaJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(KDPJalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=6).filter(id_mutasi_berkurang__exact=5) class KontrakJalanIrigasiJaringanDPUPRAdmin(KontrakJalanIrigasiJaringanAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakJalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaJalanIrigasiJaringanDPUPRAdmin(HargaJalanIrigasiJaringanAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) jalan_irigasi_jaringan_qs = JalanIrigasiJaringan.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_jalan_irigasi_jaringan__in=jalan_irigasi_jaringan_qs) class JalanIrigasiJaringanPenghapusanDPUPRAdmin(JalanIrigasiJaringanPenghapusanAdmin): inlines = [PenghapusanJalanIrigasiJaringanDPUPRInline, 
TahunBerkurangJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, SKPDTujuanJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) ###Register JalanIrigasiJaringan DPUPR admin.site.register(JalanIrigasiJaringanDPUPR, JalanIrigasiJaringanDPUPRAdmin) admin.site.register(JalanIrigasiJaringanUsulHapusDPUPR, JalanIrigasiJaringanUsulHapusDPUPRAdmin) admin.site.register(KDPJalanIrigasiJaringanDPUPR, KDPJalanIrigasiJaringanDPUPRAdmin) admin.site.register(KontrakJalanIrigasiJaringanDPUPR, KontrakJalanIrigasiJaringanDPUPRAdmin) admin.site.register(HargaJalanIrigasiJaringanDPUPR, HargaJalanIrigasiJaringanDPUPRAdmin) admin.site.register(JalanIrigasiJaringanPenghapusanDPUPR, JalanIrigasiJaringanPenghapusanDPUPRAdmin) #### Aset Tetap Lainnya from atl.models import KontrakATL, HargaATL, ATL, PenghapusanATL, PemanfaatanATL, TahunBerkurangATL, TahunBerkurangUsulHapusATL from atl.models import ATLPemanfaatan, ATLPenghapusan, ATLUsulHapus from atl.models import ATLDPUPR, KontrakATLDPUPR, HargaATLDPUPR, ATLUsulHapusDPUPR, TahunBerkurangUsulHapusATLDPUPR from atl.models import ATLPenghapusanDPUPR, TahunBerkurangATLDPUPR, PenghapusanATLDPUPR from atl.models import SKPDAsalATLDPUPR, SKPDTujuanATLDPUPR, FotoATLDPUPR from atl.admin import HargaATLInline, ATLAdmin, KontrakATLAdmin, HargaATLAdmin, TahunBerkurangUsulHapusATLInline, ATLUsulHapusAdmin from atl.admin import TahunBerkurangATLInline, PenghapusanATLInline, ATLPenghapusanAdmin from atl.admin import SKPDAsalATLInline, SKPDTujuanATLInline, FotoATLInline #### Class Aset Tetap Lainnya class TahunBerkurangATLDPUPRInline(TahunBerkurangATLInline): model = TahunBerkurangATLDPUPR class PenghapusanATLDPUPRInline(PenghapusanATLInline): model = PenghapusanATLDPUPR class SKPDAsalATLDPUPRInline(SKPDAsalATLInline): model = SKPDAsalATLDPUPR class SKPDTujuanATLDPUPRInline(SKPDTujuanATLInline): model = SKPDTujuanATLDPUPR class FotoATLDPUPRInline(FotoATLInline): model = FotoATLDPUPR class HargaATLDPUPRInline(HargaATLInline): model = HargaATLDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_atl": kwargs["queryset"] = KontrakATL.objects.filter(id_skpd__exact=3) return super(HargaATLDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusATLDPUPRInline(TahunBerkurangUsulHapusATLInline): model = TahunBerkurangUsulHapusATLDPUPR class ATLDPUPRAdmin(ATLAdmin): inlines = [HargaATLDPUPRInline, SKPDAsalATLDPUPRInline, FotoATLDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) if db_field.name == "id_ruangan": kwargs["queryset"] = Ruangan.objects.filter(id_gedung_bangunan__id_sub_skpd__id_skpd__exact=3) return super(ATLDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5) class ATLUsulHapusDPUPRAdmin(ATLUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusATLDPUPRInline, SKPDAsalATLDPUPRInline, FotoATLDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return 
self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=5).filter(id_mutasi_berkurang__exact=3) class KontrakATLDPUPRAdmin(KontrakATLAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakATLDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaATLDPUPRAdmin(HargaATLAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) atl_qs = ATL.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_atl__in=atl_qs) class ATLPenghapusanDPUPRAdmin(ATLPenghapusanAdmin): inlines = [PenghapusanATLDPUPRInline, TahunBerkurangATLDPUPRInline, SKPDAsalATLDPUPRInline, SKPDTujuanATLDPUPRInline, FotoATLDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) ###Register ATL DPUPR admin.site.register(ATLDPUPR, ATLDPUPRAdmin) admin.site.register(ATLUsulHapusDPUPR, ATLUsulHapusDPUPRAdmin) admin.site.register(KontrakATLDPUPR, KontrakATLDPUPRAdmin) admin.site.register(HargaATLDPUPR, HargaATLDPUPRAdmin) admin.site.register(ATLPenghapusanDPUPR, ATLPenghapusanDPUPRAdmin)
40.496444
357
0.801012
from django.contrib import admin from umum.models import Provinsi, Kabupaten, LokasiBidang, SKPD, SUBSKPD, KodeBarang, HakTanah, SatuanBarang, KeadaanBarang, SKPenghapusan, MutasiBerkurang, JenisPemanfaatan, AsalUsul, Tahun, GolonganBarang, Tanah, KontrakTanah, PenghapusanTanah, TanahPenghapusan, PemanfaatanTanah, TanahPemanfaatan, HargaTanah, TahunBerkurangUsulHapusTanah, TanahUsulHapus from umum.models import TanahDPUPR, KontrakTanahDPUPR, HargaTanahDPUPR, TanahUsulHapusDPUPR, TahunBerkurangUsulHapusTanahDPUPR from umum.models import TanahPenghapusanDPUPR, TahunBerkurangTanahDPUPR, PenghapusanTanahDPUPR from umum.models import SKPDAsalTanahDPUPR, SKPDTujuanTanahDPUPR, FotoTanahDPUPR from umum.admin import HargaTanahInline, TanahAdmin, KontrakTanahAdmin, HargaTanahAdmin, TahunBerkurangUsulHapusTanahInline, TanahUsulHapusAdmin from umum.admin import TahunBerkurangTanahInline, PenghapusanTanahInline, TanahPenghapusanAdmin from umum.admin import SKPDAsalTanahInline, SKPDTujuanTanahInline, FotoTanahInline from umum.admin import GedungBangunanInline from gedungbangunan.models import StatusTingkat, StatusBeton, KontrakGedungBangunan, HargaGedungBangunan, GedungBangunan, PenghapusanGedungBangunan, PemanfaatanGedungBangunan, TahunBerkurangGedungBangunan, Ruangan, TahunBerkurangUsulHapusGedung from gedungbangunan.models import GedungBangunanPemanfaatan, GedungBangunanPenghapusan, GedungBangunanRuangan, GedungBangunanUsulHapus from gedungbangunan.models import GedungBangunanDPUPR, KontrakGedungBangunanDPUPR, HargaGedungBangunanDPUPR, GedungBangunanRuanganDPUPR, GedungBangunanUsulHapusDPUPR, TahunBerkurangUsulHapusGedungDPUPR from gedungbangunan.models import GedungBangunanPenghapusanDPUPR, TahunBerkurangGedungBangunanDPUPR, PenghapusanGedungBangunanDPUPR from gedungbangunan.models import SKPDAsalGedungBangunanDPUPR, SKPDTujuanGedungBangunanDPUPR, FotoGedungBangunanDPUPR from gedungbangunan.admin import HargaGedungBangunanInline, GedungBangunanAdmin, KontrakGedungBangunanAdmin, HargaGedungBangunanAdmin, RuanganInline, GedungBangunanRuanganAdmin, KDPGedungBangunanAdmin, TahunBerkurangUsulHapusGedungInline, GedungBangunanUsulHapusAdmin from gedungbangunan.admin import TahunBerkurangGedungBangunanInline, PenghapusanGedungBangunanInline, GedungBangunanPenghapusanAdmin from gedungbangunan.admin import SKPDAsalGedungBangunanInline, SKPDTujuanGedungBangunanInline, FotoGedungBangunanInline from peralatanmesin.models import KontrakPeralatanMesin, HargaPeralatanMesin, PeralatanMesin, PenghapusanPeralatanMesin, PemanfaatanPeralatanMesin, TahunBerkurangPeralatanMesin, TahunBerkurangUsulHapusPeralatanMesin from peralatanmesin.models import PeralatanMesinPemanfaatan, PeralatanMesinPenghapusan, PeralatanMesinUsulHapus from peralatanmesin.models import PeralatanMesinDPUPR, KontrakPeralatanMesinDPUPR, HargaPeralatanMesinDPUPR, PeralatanMesinUsulHapusDPUPR, TahunBerkurangUsulHapusPeralatanMesinDPUPR from peralatanmesin.models import PeralatanMesinPenghapusanDPUPR, TahunBerkurangPeralatanMesinDPUPR, PenghapusanPeralatanMesinDPUPR from peralatanmesin.models import SKPDAsalPeralatanMesinDPUPR, SKPDTujuanPeralatanMesinDPUPR, FotoPeralatanMesinDPUPR from peralatanmesin.admin import HargaPeralatanMesinInline, PeralatanMesinAdmin, KontrakPeralatanMesinAdmin, HargaPeralatanMesinAdmin, TahunBerkurangUsulHapusPeralatanMesinInline, PeralatanMesinUsulHapusAdmin from peralatanmesin.admin import TahunBerkurangPeralatanMesinInline, PenghapusanPeralatanMesinInline, PeralatanMesinPenghapusanAdmin from peralatanmesin.admin import 
SKPDAsalPeralatanMesinInline, SKPDTujuanPeralatanMesinInline, FotoPeralatanMesinInline class TahunBerkurangTanahDPUPRInline(TahunBerkurangTanahInline): model = TahunBerkurangTanahDPUPR class PenghapusanTanahDPUPRInline(PenghapusanTanahInline): model = PenghapusanTanahDPUPR class SKPDAsalTanahDPUPRInline(SKPDAsalTanahInline): model = SKPDAsalTanahDPUPR class SKPDTujuanTanahDPUPRInline(SKPDTujuanTanahInline): model = SKPDTujuanTanahDPUPR class FotoTanahDPUPRInline(FotoTanahInline): model = FotoTanahDPUPR class GedungBangunanDPUPRInline(GedungBangunanInline): model = GedungBangunanDPUPR class HargaTanahDPUPRInline(HargaTanahInline): model = HargaTanahDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak": kwargs["queryset"] = KontrakTanah.objects.filter(id_skpd__exact=3) return super(HargaTanahDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusTanahDPUPRInline(TahunBerkurangUsulHapusTanahInline): model = TahunBerkurangUsulHapusTanahDPUPR class TanahDPUPRAdmin(TanahAdmin): inlines = [HargaTanahDPUPRInline, SKPDAsalTanahDPUPRInline, FotoTanahDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(TanahDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5) class TanahUsulHapusDPUPRAdmin(TanahUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusTanahDPUPRInline, SKPDAsalTanahDPUPRInline, FotoTanahDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=3) class KontrakTanahDPUPRAdmin(KontrakTanahAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakTanahDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaTanahDPUPRAdmin(HargaTanahAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) tanah_qs = Tanah.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_tanah__in=tanah_qs) class TanahPenghapusanDPUPRAdmin(TanahPenghapusanAdmin): inlines = [PenghapusanTanahDPUPRInline, TahunBerkurangTanahDPUPRInline, SKPDAsalTanahDPUPRInline, SKPDTujuanTanahDPUPRInline, FotoTanahDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) admin.site.register(TanahDPUPR, TanahDPUPRAdmin) admin.site.register(TanahUsulHapusDPUPR, TanahUsulHapusDPUPRAdmin) admin.site.register(KontrakTanahDPUPR, KontrakTanahDPUPRAdmin) admin.site.register(HargaTanahDPUPR, HargaTanahDPUPRAdmin) admin.site.register(TanahPenghapusanDPUPR, TanahPenghapusanDPUPRAdmin) from gedungbangunan.models import KDPGedungBangunanDPUPR class TahunBerkurangGedungBangunanDPUPRInline(TahunBerkurangGedungBangunanInline): model = TahunBerkurangGedungBangunanDPUPR class PenghapusanGedungBangunanDPUPRInline(PenghapusanGedungBangunanInline): model = PenghapusanGedungBangunanDPUPR class 
SKPDAsalGedungBangunanDPUPRInline(SKPDAsalGedungBangunanInline): model = SKPDAsalGedungBangunanDPUPR class SKPDTujuanGedungBangunanDPUPRInline(SKPDTujuanGedungBangunanInline): model = SKPDTujuanGedungBangunanDPUPR class FotoGedungBangunanDPUPRInline(FotoGedungBangunanInline): model = FotoGedungBangunanDPUPR class HargaGedungBangunanDPUPRInline(HargaGedungBangunanInline): model = HargaGedungBangunanDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_gedung_bangunan": kwargs["queryset"] = KontrakGedungBangunan.objects.filter(id_skpd__exact=3) return super(HargaGedungBangunanDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusGedungDPUPRInline(TahunBerkurangUsulHapusGedungInline): model = TahunBerkurangUsulHapusGedungDPUPR class GedungBangunanDPUPRAdmin(GedungBangunanAdmin): inlines = [HargaGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(GedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=5) class KDPGedungBangunanDPUPRAdmin(KDPGedungBangunanAdmin): inlines = [HargaGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) return super(KDPGedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=6).filter(id_mutasi_berkurang__exact=5) class GedungBangunanRuanganDPUPRAdmin(GedungBangunanRuanganAdmin): def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=5) class GedungBangunanUsulHapusDPUPRAdmin(GedungBangunanUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusGedungDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=3).filter(id_mutasi_berkurang__exact=3) class KontrakGedungBangunanDPUPRAdmin(KontrakGedungBangunanAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakGedungBangunanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return self.model.objects.filter(id_skpd__exact=3) class HargaGedungBangunanDPUPRAdmin(HargaGedungBangunanAdmin): def get_queryset(self, request): sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3) gedung_bangunan_qs = GedungBangunan.objects.filter(id_sub_skpd__in=sub_skpd_qs) return self.model.objects.filter(id_gedung_bangunan__in=gedung_bangunan_qs) class 
GedungBangunanPenghapusanDPUPRAdmin(GedungBangunanPenghapusanAdmin): inlines = [PenghapusanGedungBangunanDPUPRInline, TahunBerkurangGedungBangunanDPUPRInline, SKPDAsalGedungBangunanDPUPRInline, SKPDTujuanGedungBangunanDPUPRInline, FotoGedungBangunanDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,]) admin.site.register(GedungBangunanDPUPR, GedungBangunanDPUPRAdmin) admin.site.register(KDPGedungBangunanDPUPR, KDPGedungBangunanDPUPRAdmin) admin.site.register(GedungBangunanRuanganDPUPR, GedungBangunanRuanganDPUPRAdmin) admin.site.register(GedungBangunanUsulHapusDPUPR, GedungBangunanUsulHapusDPUPRAdmin) admin.site.register(KontrakGedungBangunanDPUPR, KontrakGedungBangunanDPUPRAdmin) admin.site.register(HargaGedungBangunanDPUPR, HargaGedungBangunanDPUPRAdmin) admin.site.register(GedungBangunanPenghapusanDPUPR, GedungBangunanPenghapusanDPUPRAdmin) class TahunBerkurangPeralatanMesinDPUPRInline(TahunBerkurangPeralatanMesinInline): model = TahunBerkurangPeralatanMesinDPUPR class PenghapusanPeralatanMesinDPUPRInline(PenghapusanPeralatanMesinInline): model = PenghapusanPeralatanMesinDPUPR class SKPDAsalPeralatanMesinDPUPRInline(SKPDAsalPeralatanMesinInline): model = SKPDAsalPeralatanMesinDPUPR class SKPDTujuanPeralatanMesinDPUPRInline(SKPDTujuanPeralatanMesinInline): model = SKPDTujuanPeralatanMesinDPUPR class FotoPeralatanMesinDPUPRInline(FotoPeralatanMesinInline): model = FotoPeralatanMesinDPUPR class HargaPeralatanMesinDPUPRInline(HargaPeralatanMesinInline): model = HargaPeralatanMesinDPUPR def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_kontrak_peralatan_mesin": kwargs["queryset"] = KontrakPeralatanMesin.objects.filter(id_skpd__exact=3) return super(HargaPeralatanMesinDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs) class TahunBerkurangUsulHapusPeralatanMesinDPUPRInline(TahunBerkurangUsulHapusPeralatanMesinInline): model = TahunBerkurangUsulHapusPeralatanMesinDPUPR class PeralatanMesinDPUPRAdmin(PeralatanMesinAdmin): inlines = [HargaPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ] def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_sub_skpd": kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3) if db_field.name == "id_ruangan": kwargs["queryset"] = Ruangan.objects.filter(id_gedung_bangunan__id_sub_skpd__id_skpd__exact=3) return super(PeralatanMesinDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5) class PeralatanMesinUsulHapusDPUPRAdmin(PeralatanMesinUsulHapusAdmin): inlines = [TahunBerkurangUsulHapusPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ] def get_queryset(self, request): qs = SUBSKPD.objects.filter(id_skpd__exact=3) return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=3) class KontrakPeralatanMesinDPUPRAdmin(KontrakPeralatanMesinAdmin): def formfield_for_foreignkey(self, db_field, request, **kwargs): if db_field.name == "id_skpd": kwargs["queryset"] = SKPD.objects.filter(id__exact=3) return super(KontrakPeralatanMesinDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_queryset(self, request): return 
self.model.objects.filter(id_skpd__exact=3)

class HargaPeralatanMesinDPUPRAdmin(HargaPeralatanMesinAdmin):
    def get_queryset(self, request):
        sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        peralatan_mesin_qs = PeralatanMesin.objects.filter(id_sub_skpd__in=sub_skpd_qs)
        return self.model.objects.filter(id_peralatan_mesin__in=peralatan_mesin_qs)

class PeralatanMesinPenghapusanDPUPRAdmin(PeralatanMesinPenghapusanAdmin):
    inlines = [PenghapusanPeralatanMesinDPUPRInline, TahunBerkurangPeralatanMesinDPUPRInline, SKPDAsalPeralatanMesinDPUPRInline, SKPDTujuanPeralatanMesinDPUPRInline, FotoPeralatanMesinDPUPRInline, ]
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,])

admin.site.register(PeralatanMesinDPUPR, PeralatanMesinDPUPRAdmin)
admin.site.register(PeralatanMesinUsulHapusDPUPR, PeralatanMesinUsulHapusDPUPRAdmin)
admin.site.register(KontrakPeralatanMesinDPUPR, KontrakPeralatanMesinDPUPRAdmin)
admin.site.register(HargaPeralatanMesinDPUPR, HargaPeralatanMesinDPUPRAdmin)
admin.site.register(PeralatanMesinPenghapusanDPUPR, PeralatanMesinPenghapusanDPUPRAdmin)

from jalanirigasijaringan.models import KontrakJalanIrigasiJaringan, HargaJalanIrigasiJaringan, JalanIrigasiJaringan, PenghapusanJalanIrigasiJaringan, PemanfaatanJalanIrigasiJaringan, TahunBerkurangJalanIrigasiJaringan, TahunBerkurangUsulHapusJalanIrigasiJaringan
from jalanirigasijaringan.models import JalanIrigasiJaringanPemanfaatan, JalanIrigasiJaringanPenghapusan, JalanIrigasiJaringanUsulHapus
from jalanirigasijaringan.models import JalanIrigasiJaringanDPUPR, KontrakJalanIrigasiJaringanDPUPR, HargaJalanIrigasiJaringanDPUPR, KDPJalanIrigasiJaringanDPUPR, JalanIrigasiJaringanUsulHapusDPUPR, TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPR
from jalanirigasijaringan.models import JalanIrigasiJaringanPenghapusanDPUPR, TahunBerkurangJalanIrigasiJaringanDPUPR, PenghapusanJalanIrigasiJaringanDPUPR
from jalanirigasijaringan.models import SKPDAsalJalanIrigasiJaringanDPUPR, SKPDTujuanJalanIrigasiJaringanDPUPR, FotoJalanIrigasiJaringanDPUPR
from jalanirigasijaringan.admin import HargaJalanIrigasiJaringanInline, JalanIrigasiJaringanAdmin, KontrakJalanIrigasiJaringanAdmin, HargaJalanIrigasiJaringanAdmin, KDPJalanIrigasiJaringanAdmin, TahunBerkurangUsulHapusJalanIrigasiJaringanInline, JalanIrigasiJaringanUsulHapusAdmin
from jalanirigasijaringan.admin import TahunBerkurangJalanIrigasiJaringanInline, PenghapusanJalanIrigasiJaringanInline, JalanIrigasiJaringanPenghapusanAdmin
from jalanirigasijaringan.admin import SKPDAsalJalanIrigasiJaringanInline, SKPDTujuanJalanIrigasiJaringanInline, FotoJalanIrigasiJaringanInline

class TahunBerkurangJalanIrigasiJaringanDPUPRInline(TahunBerkurangJalanIrigasiJaringanInline):
    model = TahunBerkurangJalanIrigasiJaringanDPUPR

class PenghapusanJalanIrigasiJaringanDPUPRInline(PenghapusanJalanIrigasiJaringanInline):
    model = PenghapusanJalanIrigasiJaringanDPUPR

class SKPDAsalJalanIrigasiJaringanDPUPRInline(SKPDAsalJalanIrigasiJaringanInline):
    model = SKPDAsalJalanIrigasiJaringanDPUPR

class SKPDTujuanJalanIrigasiJaringanDPUPRInline(SKPDTujuanJalanIrigasiJaringanInline):
    model = SKPDTujuanJalanIrigasiJaringanDPUPR

class FotoJalanIrigasiJaringanDPUPRInline(FotoJalanIrigasiJaringanInline):
    model = FotoJalanIrigasiJaringanDPUPR

class HargaJalanIrigasiJaringanDPUPRInline(HargaJalanIrigasiJaringanInline):
    model = HargaJalanIrigasiJaringanDPUPR
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_kontrak_jalan_irigasi_jaringan":
            kwargs["queryset"] = KontrakJalanIrigasiJaringan.objects.filter(id_skpd__exact=3)
        return super(HargaJalanIrigasiJaringanDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs)

class TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPRInline(TahunBerkurangUsulHapusJalanIrigasiJaringanInline):
    model = TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPR

class JalanIrigasiJaringanDPUPRAdmin(JalanIrigasiJaringanAdmin):
    inlines = [HargaJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ]
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_sub_skpd":
            kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3)
        return super(JalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=4).filter(id_mutasi_berkurang__exact=5)

class JalanIrigasiJaringanUsulHapusDPUPRAdmin(JalanIrigasiJaringanUsulHapusAdmin):
    inlines = [TahunBerkurangUsulHapusJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ]
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=4).filter(id_mutasi_berkurang__exact=3)

class KDPJalanIrigasiJaringanDPUPRAdmin(KDPJalanIrigasiJaringanAdmin):
    inlines = [HargaJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ]
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_sub_skpd":
            kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3)
        return super(KDPJalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=6).filter(id_mutasi_berkurang__exact=5)

class KontrakJalanIrigasiJaringanDPUPRAdmin(KontrakJalanIrigasiJaringanAdmin):
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_skpd":
            kwargs["queryset"] = SKPD.objects.filter(id__exact=3)
        return super(KontrakJalanIrigasiJaringanDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
    def get_queryset(self, request):
        return self.model.objects.filter(id_skpd__exact=3)

class HargaJalanIrigasiJaringanDPUPRAdmin(HargaJalanIrigasiJaringanAdmin):
    def get_queryset(self, request):
        sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        jalan_irigasi_jaringan_qs = JalanIrigasiJaringan.objects.filter(id_sub_skpd__in=sub_skpd_qs)
        return self.model.objects.filter(id_jalan_irigasi_jaringan__in=jalan_irigasi_jaringan_qs)

class JalanIrigasiJaringanPenghapusanDPUPRAdmin(JalanIrigasiJaringanPenghapusanAdmin):
    inlines = [PenghapusanJalanIrigasiJaringanDPUPRInline, TahunBerkurangJalanIrigasiJaringanDPUPRInline, SKPDAsalJalanIrigasiJaringanDPUPRInline, SKPDTujuanJalanIrigasiJaringanDPUPRInline, FotoJalanIrigasiJaringanDPUPRInline, ]
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,])

admin.site.register(JalanIrigasiJaringanDPUPR, JalanIrigasiJaringanDPUPRAdmin)
admin.site.register(JalanIrigasiJaringanUsulHapusDPUPR, JalanIrigasiJaringanUsulHapusDPUPRAdmin)
admin.site.register(KDPJalanIrigasiJaringanDPUPR, KDPJalanIrigasiJaringanDPUPRAdmin)
admin.site.register(KontrakJalanIrigasiJaringanDPUPR, KontrakJalanIrigasiJaringanDPUPRAdmin)
admin.site.register(HargaJalanIrigasiJaringanDPUPR, HargaJalanIrigasiJaringanDPUPRAdmin)
admin.site.register(JalanIrigasiJaringanPenghapusanDPUPR, JalanIrigasiJaringanPenghapusanDPUPRAdmin)

from atl.models import KontrakATL, HargaATL, ATL, PenghapusanATL, PemanfaatanATL, TahunBerkurangATL, TahunBerkurangUsulHapusATL
from atl.models import ATLPemanfaatan, ATLPenghapusan, ATLUsulHapus
from atl.models import ATLDPUPR, KontrakATLDPUPR, HargaATLDPUPR, ATLUsulHapusDPUPR, TahunBerkurangUsulHapusATLDPUPR
from atl.models import ATLPenghapusanDPUPR, TahunBerkurangATLDPUPR, PenghapusanATLDPUPR
from atl.models import SKPDAsalATLDPUPR, SKPDTujuanATLDPUPR, FotoATLDPUPR
from atl.admin import HargaATLInline, ATLAdmin, KontrakATLAdmin, HargaATLAdmin, TahunBerkurangUsulHapusATLInline, ATLUsulHapusAdmin
from atl.admin import TahunBerkurangATLInline, PenghapusanATLInline, ATLPenghapusanAdmin
from atl.admin import SKPDAsalATLInline, SKPDTujuanATLInline, FotoATLInline

class TahunBerkurangATLDPUPRInline(TahunBerkurangATLInline):
    model = TahunBerkurangATLDPUPR

class PenghapusanATLDPUPRInline(PenghapusanATLInline):
    model = PenghapusanATLDPUPR

class SKPDAsalATLDPUPRInline(SKPDAsalATLInline):
    model = SKPDAsalATLDPUPR

class SKPDTujuanATLDPUPRInline(SKPDTujuanATLInline):
    model = SKPDTujuanATLDPUPR

class FotoATLDPUPRInline(FotoATLInline):
    model = FotoATLDPUPR

class HargaATLDPUPRInline(HargaATLInline):
    model = HargaATLDPUPR
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_kontrak_atl":
            kwargs["queryset"] = KontrakATL.objects.filter(id_skpd__exact=3)
        return super(HargaATLDPUPRInline, self).formfield_for_foreignkey(db_field, request, **kwargs)

class TahunBerkurangUsulHapusATLDPUPRInline(TahunBerkurangUsulHapusATLInline):
    model = TahunBerkurangUsulHapusATLDPUPR

class ATLDPUPRAdmin(ATLAdmin):
    inlines = [HargaATLDPUPRInline, SKPDAsalATLDPUPRInline, FotoATLDPUPRInline, ]
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_sub_skpd":
            kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=3)
        if db_field.name == "id_ruangan":
            kwargs["queryset"] = Ruangan.objects.filter(id_gedung_bangunan__id_sub_skpd__id_skpd__exact=3)
        return super(ATLDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__exact=5)

class ATLUsulHapusDPUPRAdmin(ATLUsulHapusAdmin):
    inlines = [TahunBerkurangUsulHapusATLDPUPRInline, SKPDAsalATLDPUPRInline, FotoATLDPUPRInline, ]
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_golongan_barang__exact=5).filter(id_mutasi_berkurang__exact=3)

class KontrakATLDPUPRAdmin(KontrakATLAdmin):
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_skpd":
            kwargs["queryset"] = SKPD.objects.filter(id__exact=3)
        return super(KontrakATLDPUPRAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs)
    def get_queryset(self, request):
        return self.model.objects.filter(id_skpd__exact=3)

class HargaATLDPUPRAdmin(HargaATLAdmin):
    def get_queryset(self, request):
        sub_skpd_qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        atl_qs = ATL.objects.filter(id_sub_skpd__in=sub_skpd_qs)
        return self.model.objects.filter(id_atl__in=atl_qs)

class ATLPenghapusanDPUPRAdmin(ATLPenghapusanAdmin):
    inlines = [PenghapusanATLDPUPRInline, TahunBerkurangATLDPUPRInline, SKPDAsalATLDPUPRInline, SKPDTujuanATLDPUPRInline, FotoATLDPUPRInline, ]
    def get_queryset(self, request):
        qs = SUBSKPD.objects.filter(id_skpd__exact=3)
        return self.model.objects.filter(id_sub_skpd__in=qs).filter(id_mutasi_berkurang__in=[2,4,6,7,10,])

admin.site.register(ATLDPUPR, ATLDPUPRAdmin)
admin.site.register(ATLUsulHapusDPUPR, ATLUsulHapusDPUPRAdmin)
admin.site.register(KontrakATLDPUPR, KontrakATLDPUPRAdmin)
admin.site.register(HargaATLDPUPR, HargaATLDPUPRAdmin)
admin.site.register(ATLPenghapusanDPUPR, ATLPenghapusanDPUPRAdmin)
true
true
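The admin classes in the record above repeat one scoping idiom over and over: override get_queryset to keep only rows whose sub-SKPD belongs to SKPD id 3, and override formfield_for_foreignkey so the foreign-key choices are narrowed the same way. A minimal sketch of how that repetition could be factored into a reusable mixin follows; DPUPRScopedAdminMixin and SKPD_ID are illustrative names that do not appear in the original repository, and the field traversal assumes id_sub_skpd is a ForeignKey to SUBSKPD, which itself has an id_skpd ForeignKey (as the original queries suggest).

# Sketch only, not part of the original project.
# SUBSKPD is assumed to be the model already imported by the surrounding module.
SKPD_ID = 3  # hypothetical constant; the original hard-codes the literal 3 everywhere


class DPUPRScopedAdminMixin:
    """Scope a ModelAdmin to one SKPD (assumes the model has an id_sub_skpd FK)."""

    def get_queryset(self, request):
        qs = super().get_queryset(request)
        # Equivalent to the original's id_sub_skpd__in=SUBSKPD.objects.filter(...)
        return qs.filter(id_sub_skpd__id_skpd__exact=SKPD_ID)

    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == "id_sub_skpd":
            kwargs["queryset"] = SUBSKPD.objects.filter(id_skpd__exact=SKPD_ID)
        return super().formfield_for_foreignkey(db_field, request, **kwargs)


# Usage sketch: class ATLDPUPRAdmin(DPUPRScopedAdminMixin, ATLAdmin): ...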
f70113c43d8703cf4d54d5b5d27b33e786dc3768
6,757
py
Python
Detector_1/fusion_detecting.py
JaeyoonSSim/Design-Project
8a0037bec50b44b3f5d92da5254e79964fdaf9cf
[ "MIT" ]
null
null
null
Detector_1/fusion_detecting.py
JaeyoonSSim/Design-Project
8a0037bec50b44b3f5d92da5254e79964fdaf9cf
[ "MIT" ]
null
null
null
Detector_1/fusion_detecting.py
JaeyoonSSim/Design-Project
8a0037bec50b44b3f5d92da5254e79964fdaf9cf
[ "MIT" ]
null
null
null
import cv2
import sys
import os
import numpy as np
import time

# Initialize the parameters
confThreshold = 0.5  # Confidence threshold
nmsThreshold = 0.4   # Non-maximum suppression threshold
inpWidth = 416       # Width of network's input image
inpHeight = 416      # Height of network's input image

starting_time = 0
frame_id = 0
font = cv2.FONT_HERSHEY_PLAIN

# Load names of classes
classesFile = "coco.names"
classes = None
with open(classesFile, 'rt') as f:
    classes = f.read().rstrip('\n').split('\n')

# Give the configuration and weight files for the model and load the network using them.
modelConfiguration = "yolov3.cfg"
modelWeights = "yolov3.weights"
net = cv2.dnn.readNetFromDarknet(modelConfiguration, modelWeights)
net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)

inputFile = "presen_T.mp4"
inputFile2 = "presen_R.mp4"
outputFile = "yolo_out_py.avi"

# Open the video file
if not os.path.isfile(inputFile):
    print("Input video file ", inputFile, " doesn't exist")
    sys.exit(1)
cap = cv2.VideoCapture(inputFile)
cap2 = cv2.VideoCapture(inputFile2)
outputFile = inputFile[:-4] + "_yolo_out_py.avi"

# Get the video writer initialized to save the output video
vid_writer = cv2.VideoWriter(outputFile, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30,
                             (round(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))

# Get the names of the output layers
def getOutputsNames(net):
    # Get the names of all the layers in the network
    layersNames = net.getLayerNames()
    # Get the names of the output layers, i.e. the layers with unconnected outputs
    return [layersNames[i[0] - 1] for i in net.getUnconnectedOutLayers()]

# Draw the predicted bounding box
def drawPred(classId, conf, left, top, right, bottom):
    # Draw a bounding box.
    cv2.rectangle(frame, (left, top), (right, bottom), (0, 255, 0))
    label = '%.2f' % conf
    # Get the label for the class name and its confidence
    if classes:
        assert (classId < len(classes))
        label = '%s:%s' % (classes[classId], label)
    # Display the label at the top of the bounding box
    labelSize, baseLine = cv2.getTextSize(label, font, 0.5, 1)
    top = max(top, labelSize[1])
    cv2.putText(frame, label, (left, top), font, 1, (0, 255, 0), 2)

# Remove the bounding boxes with low confidence using non-maxima suppression
def postprocess(frame, outs):
    frameHeight = frame.shape[0]
    frameWidth = frame.shape[1]
    # Scan through all the bounding boxes output from the network and keep only the
    # ones with high confidence scores. Assign the box's class label as the class with the highest score.
    classIds = []
    confidences = []
    boxes = []
    for out in outs:
        for detection in out:
            scores = detection[5:]
            classId = np.argmax(scores)
            confidence = scores[classId]
            if confidence > confThreshold:
                center_x = int(detection[0] * frameWidth)
                center_y = int(detection[1] * frameHeight)
                width = int(detection[2] * frameWidth)
                height = int(detection[3] * frameHeight)
                left = int(center_x - width / 2)
                top = int(center_y - height / 2)
                classIds.append(classId)
                confidences.append(float(confidence))
                boxes.append([left, top, width, height])
    # Perform non maximum suppression to eliminate redundant overlapping boxes with
    # lower confidences.
    indices = cv2.dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold)
    for i in indices:
        i = i[0]
        box = boxes[i]
        left = box[0]
        top = box[1]
        width = box[2]
        height = box[3]
        drawPred(classIds[i], confidences[i], left, top, left + width, top + height)

# Main
while True:
    # get frame from the video
    hasFrame, frame = cap.read()
    hasFrame2, frame2 = cap2.read()

    # Stop the program if reached end of video.
    # (Checked before resizing: once either capture is exhausted, read() returns
    # a None frame and cv2.resize would raise before this branch could be reached.)
    if not hasFrame or not hasFrame2:
        print("Done processing !!!")
        cv2.waitKey(3000)
        break

    frame = cv2.resize(frame, dsize=(600, 402))
    frame2 = cv2.resize(frame2, dsize=(600, 402))
    cv2.imshow("Camera", frame)
    cv2.imshow("Thermal_Camera", frame2)

    # Create a 4D blob from a frame.
    blob = cv2.dnn.blobFromImage(frame, 1 / 255, (inpWidth, inpHeight), [0, 0, 0], 1, crop=False)

    # Sets the input to the network
    net.setInput(blob)

    # Runs the forward pass to get output of the output layers
    outs = net.forward(getOutputsNames(net))

    # Remove the bounding boxes with low confidence
    postprocess(frame, outs)

    # Print the FPS
    current_time = time.time()
    sec = current_time - starting_time
    starting_time = current_time
    fps = 1 / (sec)
    str2 = "FPS : %0.1f" % fps
    # cv2.putText(frame, str2, (10, 50), font, 2, (0, 255, 0), 2)

    # Write the frame with the detection boxes
    vid_writer.write(frame.astype(np.uint8))

    # CAMERA RESULT
    cv2.imshow("CAMERA_Detection", frame)

    img2 = None
    fast = cv2.FastFeatureDetector_create(30)
    fast.setNonmaxSuppression(0)
    kp = fast.detect(frame2, None)
    img2 = cv2.drawKeypoints(frame2, kp, img2, (0, 255, 255))
    # cv2.imshow("THERMAL", img2)

    hsv = cv2.cvtColor(frame2, cv2.COLOR_BGR2HSV)
    car_prediction = 30
    lower_white = np.array([0, 0, 255 - car_prediction], dtype=np.uint8)
    upper_white = np.array([255, car_prediction, 255], dtype=np.uint8)
    mask_white = cv2.inRange(hsv, lower_white, upper_white)
    res = cv2.bitwise_and(frame2, frame2, mask=mask_white)
    # cv2.imshow("THERMAL_CAR", res)

    res2 = None
    res2 = res
    igray = cv2.cvtColor(res2, cv2.COLOR_BGR2GRAY)
    iret, ibinary = cv2.threshold(igray, 127, 255, cv2.THRESH_BINARY)
    contours, hierachy = cv2.findContours(ibinary, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
    for i in range(len(contours)):
        cv2.drawContours(res2, [contours[i]], 0, (255, 255, 255), 2)
        cv2.putText(res2, "car", tuple(contours[i][0][0]), font, 1, (0, 255, 0), 1)
    # cv2.imshow("THERMAL_CONTOUR", res2)

    # THERMAL PROCESSING RESULT
    dst = cv2.addWeighted(res2, 1, frame2, 1, 0)
    #cv2.imshow('THERMAL_RES',dst)
    #cv2.imshow("THERMAL",frame2)

    # FINAL RESULT
    dst2 = cv2.addWeighted(res2, 1, frame, 1, 0)
    cv2.imshow("RESULT",dst2)

    # End the video with "Esc"
    key = cv2.waitKey(1)
    if key == 27:
        break

cap.release()
cv2.destroyAllWindows()
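One portability note on the file above: getOutputsNames indexes i[0] - 1, which relies on the older cv2.dnn behaviour where getUnconnectedOutLayers() returns an Nx1 array. On recent OpenCV releases the call returns a flat 1-D array, so i[0] fails with an IndexError. A shape-agnostic variant (a sketch, not code from the repository) is:

import numpy as np

def get_output_layer_names(net):
    """Shape-agnostic replacement for getOutputsNames (sketch only).

    Flattening covers both the old Nx1 and the newer 1-D return shapes of
    cv2.dnn_Net.getUnconnectedOutLayers(), and layer ids remain 1-based.
    """
    layer_names = net.getLayerNames()
    out_ids = np.array(net.getUnconnectedOutLayers()).flatten()
    return [layer_names[int(i) - 1] for i in out_ids]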
34.829897
115
0.637413
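The three values above are this record's derived statistics: avg_line_length, max_line_length, and alphanum_fraction of the content field. A small sketch of how such values can be recomputed to sanity-check a record follows; the exact rounding and newline conventions used by the dataset are assumptions.

def line_stats(content: str):
    """Recompute (avg_line_length, max_line_length, alphanum_fraction) for one
    record's content string. Sketch only; the dataset's exact conventions
    (e.g. treatment of a trailing newline) are assumed."""
    lines = content.splitlines()
    avg_line_length = sum(len(line) for line in lines) / len(lines)
    max_line_length = max(len(line) for line in lines)
    alphanum_fraction = sum(ch.isalnum() for ch in content) / len(content)
    return avg_line_length, max_line_length, alphanum_fraction

# For this record one would expect values close to (34.83, 115, 0.637).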
import cv2
import sys
import os
import numpy as np
import time

confThreshold = 0.5
nmsThreshold = 0.4
inpWidth = 416
inpHeight = 416  # Height of network's input image

starting_time = 0
frame_id = 0
font = cv2.FONT_HERSHEY_PLAIN

classesFile = "coco.names"
classes = None
with open(classesFile, 'rt') as f:
    classes = f.read().rstrip('\n').split('\n')

modelConfiguration = "yolov3.cfg"
modelWeights = "yolov3.weights"
net = cv2.dnn.readNetFromDarknet(modelConfiguration, modelWeights)
net.setPreferableBackend(cv2.dnn.DNN_BACKEND_OPENCV)
net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)

inputFile = "presen_T.mp4"
inputFile2 = "presen_R.mp4"
outputFile = "yolo_out_py.avi"

if not os.path.isfile(inputFile):
    print("Input video file ", inputFile, " doesn't exist")
    sys.exit(1)
cap = cv2.VideoCapture(inputFile)
cap2 = cv2.VideoCapture(inputFile2)
outputFile = inputFile[:-4] + "_yolo_out_py.avi"

# Get the video writer initialized to save the output video
vid_writer = cv2.VideoWriter(outputFile, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 30,
                             (round(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))

# Get the names of the output layers
def getOutputsNames(net):
    # Get the names of all the layers in the network
    layersNames = net.getLayerNames()
    # Get the names of the output layers, i.e. the layers with unconnected outputs
    return [layersNames[i[0] - 1] for i in net.getUnconnectedOutLayers()]

# Draw the predicted bounding box
def drawPred(classId, conf, left, top, right, bottom):
    # Draw a bounding box.
    cv2.rectangle(frame, (left, top), (right, bottom), (0, 255, 0))
    label = '%.2f' % conf
    # Get the label for the class name and its confidence
    if classes:
        assert (classId < len(classes))
        label = '%s:%s' % (classes[classId], label)
    # Display the label at the top of the bounding box
    labelSize, baseLine = cv2.getTextSize(label, font, 0.5, 1)
    top = max(top, labelSize[1])
    cv2.putText(frame, label, (left, top), font, 1, (0, 255, 0), 2)

# Remove the bounding boxes with low confidence using non-maxima suppression
def postprocess(frame, outs):
    frameHeight = frame.shape[0]
    frameWidth = frame.shape[1]
    # Scan through all the bounding boxes output from the network and keep only the
    # ones with high confidence scores. Assign the box's class label as the class with the highest score.
    classIds = []
    confidences = []
    boxes = []
    for out in outs:
        for detection in out:
            scores = detection[5:]
            classId = np.argmax(scores)
            confidence = scores[classId]
            if confidence > confThreshold:
                center_x = int(detection[0] * frameWidth)
                center_y = int(detection[1] * frameHeight)
                width = int(detection[2] * frameWidth)
                height = int(detection[3] * frameHeight)
                left = int(center_x - width / 2)
                top = int(center_y - height / 2)
                classIds.append(classId)
                confidences.append(float(confidence))
                boxes.append([left, top, width, height])
    indices = cv2.dnn.NMSBoxes(boxes, confidences, confThreshold, nmsThreshold)
    for i in indices:
        i = i[0]
        box = boxes[i]
        left = box[0]
        top = box[1]
        width = box[2]
        height = box[3]
        drawPred(classIds[i], confidences[i], left, top, left + width, top + height)

while True:
    hasFrame, frame = cap.read()
    hasFrame2, frame2 = cap2.read()
    frame = cv2.resize(frame, dsize=(600, 402))
    frame2 = cv2.resize(frame2, dsize=(600, 402))
    cv2.imshow("Camera", frame)
    cv2.imshow("Thermal_Camera", frame2)

    if not hasFrame:
        print("Done processing !!!")
        cv2.waitKey(3000)
        break

    blob = cv2.dnn.blobFromImage(frame, 1 / 255, (inpWidth, inpHeight), [0, 0, 0], 1, crop=False)
    net.setInput(blob)
    outs = net.forward(getOutputsNames(net))
    postprocess(frame, outs)

    current_time = time.time()
    sec = current_time - starting_time
    starting_time = current_time
    fps = 1 / (sec)
    str2 = "FPS : %0.1f" % fps

    vid_writer.write(frame.astype(np.uint8))
    cv2.imshow("CAMERA_Detection", frame)

    img2 = None
    fast = cv2.FastFeatureDetector_create(30)
    fast.setNonmaxSuppression(0)
    kp = fast.detect(frame2, None)
    img2 = cv2.drawKeypoints(frame2, kp, img2, (0, 255, 255))

    hsv = cv2.cvtColor(frame2, cv2.COLOR_BGR2HSV)
    car_prediction = 30
    lower_white = np.array([0, 0, 255 - car_prediction], dtype=np.uint8)
    upper_white = np.array([255, car_prediction, 255], dtype=np.uint8)
    mask_white = cv2.inRange(hsv, lower_white, upper_white)
    res = cv2.bitwise_and(frame2, frame2, mask=mask_white)

    res2 = None
    res2 = res
    igray = cv2.cvtColor(res2, cv2.COLOR_BGR2GRAY)
    iret, ibinary = cv2.threshold(igray, 127, 255, cv2.THRESH_BINARY)
    contours, hierachy = cv2.findContours(ibinary, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
    for i in range(len(contours)):
        cv2.drawContours(res2, [contours[i]], 0, (255, 255, 255), 2)
        cv2.putText(res2, "car", tuple(contours[i][0][0]), font, 1, (0, 255, 0), 1)

    dst = cv2.addWeighted(res2, 1, frame2, 1, 0)
    dst2 = cv2.addWeighted(res2, 1, frame, 1, 0)
    cv2.imshow("RESULT",dst2)

    key = cv2.waitKey(1)
    if key == 27:
        break

cap.release()
cv2.destroyAllWindows()
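The field above is the record's content_no_comment variant, i.e. the same file with most '#' comments stripped (a few survive, so the dataset's pipeline is evidently more selective than a plain line filter). The document does not show how that field is produced; the following naive sketch only illustrates the general idea and is an assumption, not the dataset's actual tooling.

def strip_full_line_comments(source: str) -> str:
    """Rough approximation of a content_no_comment-style derivation.

    Drops comment-only lines; it does NOT handle '#' inside string literals
    or inline comments, which a real pipeline would need to tokenize for.
    """
    kept = []
    for line in source.splitlines():
        if line.lstrip().startswith("#"):
            continue  # drop full-line comments
        kept.append(line)
    return "\n".join(kept)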
true
true
f701144532a32d57093b84a8487c8cd5c89881c1
166,327
py
Python
eventstore_grpc/proto/ClientMessageDtos_pb2.py
jmshnds/eventstore_grpc
cf6a5978bcdf5bb725a569b7373ce15ee4babb25
[ "MIT" ]
6
2021-02-04T15:48:28.000Z
2021-12-26T03:04:26.000Z
eventstore_grpc/proto/ClientMessageDtos_pb2.py
jmshnds/eventstore_grpc
cf6a5978bcdf5bb725a569b7373ce15ee4babb25
[ "MIT" ]
1
2021-04-14T00:06:25.000Z
2021-04-14T07:28:34.000Z
eventstore_grpc/proto/ClientMessageDtos_pb2.py
jmshnds/eventstore_grpc
cf6a5978bcdf5bb725a569b7373ce15ee4babb25
[ "MIT" ]
1
2021-04-14T17:22:54.000Z
2021-04-14T17:22:54.000Z
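The content that follows is protoc-generated Python for ClientMessageDtos.proto (package EventStore.Client.Messages, proto2 syntax). Generated *_pb2 modules are normally imported rather than read; a hedged usage sketch is shown below. The import path mirrors the repo path eventstore_grpc/proto/ClientMessageDtos_pb2.py shown above, and the field values are illustrative only.

import uuid

# Assumed import path, based on the repository layout in this record.
from eventstore_grpc.proto import ClientMessageDtos_pb2 as dtos

# NewEvent is declared in the descriptor below: required bytes event_id,
# required string event_type, required int32 data_content_type and
# metadata_content_type, required bytes data, optional bytes metadata.
event = dtos.NewEvent(
    event_id=uuid.uuid4().bytes,
    event_type="example-event",        # illustrative value
    data_content_type=1,               # assumption: 1 denotes a JSON payload
    metadata_content_type=0,
    data=b'{"hello": "world"}',
)

payload = event.SerializeToString()    # protobuf wire-format bytes
round_tripped = dtos.NewEvent.FromString(payload)
assert round_tripped.event_type == "example-event"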
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: ClientMessageDtos.proto """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='ClientMessageDtos.proto', package='EventStore.Client.Messages', syntax='proto2', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x17\x43lientMessageDtos.proto\x12\x1a\x45ventStore.Client.Messages\"\x8a\x01\n\x08NewEvent\x12\x10\n\x08\x65vent_id\x18\x01 \x02(\x0c\x12\x12\n\nevent_type\x18\x02 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x03 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x04 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x05 \x02(\x0c\x12\x10\n\x08metadata\x18\x06 \x01(\x0c\"\xe4\x01\n\x0b\x45ventRecord\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x10\n\x08\x65vent_id\x18\x03 \x02(\x0c\x12\x12\n\nevent_type\x18\x04 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x05 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x06 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x07 \x02(\x0c\x12\x10\n\x08metadata\x18\x08 \x01(\x0c\x12\x0f\n\x07\x63reated\x18\t \x01(\x03\x12\x15\n\rcreated_epoch\x18\n \x01(\x03\"\x85\x01\n\x14ResolvedIndexedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\"\xb1\x01\n\rResolvedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x17\n\x0f\x63ommit_position\x18\x03 \x02(\x03\x12\x18\n\x10prepare_position\x18\x04 \x02(\x03\"\x8e\x01\n\x0bWriteEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x34\n\x06\x65vents\x18\x03 \x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xe7\x01\n\x14WriteEventsCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10prepare_position\x18\x05 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63urrent_version\x18\x07 \x01(\x03\"n\n\x0c\x44\x65leteStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\x12\x13\n\x0bhard_delete\x18\x04 \x01(\x08\"\x98\x01\n\x15\x44\x65leteStreamCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x18\n\x10prepare_position\x18\x03 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x04 \x01(\x03\"]\n\x10TransactionStart\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionStartCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 
\x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"x\n\x10TransactionWrite\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x34\n\x06\x65vents\x18\x02 \x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionWriteCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"C\n\x11TransactionCommit\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x02 \x02(\x08\"\xec\x01\n\x1aTransactionCommitCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x04 \x02(\x03\x12\x19\n\x11last_event_number\x18\x05 \x02(\x03\x12\x18\n\x10prepare_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x07 \x01(\x03\"l\n\tReadEvent\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xa0\x02\n\x12ReadEventCompleted\x12N\n\x06result\x18\x01 \x02(\x0e\x32>.EventStore.Client.Messages.ReadEventCompleted.ReadEventResult\x12?\n\x05\x65vent\x18\x02 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\r\n\x05\x65rror\x18\x03 \x01(\t\"j\n\x0fReadEventResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NotFound\x10\x01\x12\x0c\n\x08NoStream\x10\x02\x12\x11\n\rStreamDeleted\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x8b\x01\n\x10ReadStreamEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x19\n\x11\x66rom_event_number\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xa2\x03\n\x19ReadStreamEventsCompleted\x12@\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12V\n\x06result\x18\x02 \x02(\x0e\x32\x46.EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult\x12\x19\n\x11next_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x05 \x02(\x08\x12\x1c\n\x14last_commit_position\x18\x06 \x02(\x03\x12\r\n\x05\x65rror\x18\x07 \x01(\t\"n\n\x10ReadStreamResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NoStream\x10\x01\x12\x11\n\rStreamDeleted\x10\x02\x12\x0f\n\x0bNotModified\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x87\x01\n\rReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xf9\x02\n\x16ReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12Y\n\x06result\x18\x06 \x01(\x0e\x32@.EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x07 
\x01(\t\"J\n\rReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xe9\x01\n\x06\x46ilter\x12\x41\n\x07\x63ontext\x18\x01 \x02(\x0e\x32\x30.EventStore.Client.Messages.Filter.FilterContext\x12;\n\x04type\x18\x02 \x02(\x0e\x32-.EventStore.Client.Messages.Filter.FilterType\x12\x0c\n\x04\x64\x61ta\x18\x03 \x03(\t\",\n\rFilterContext\x12\x0c\n\x08StreamId\x10\x00\x12\r\n\tEventType\x10\x01\"#\n\nFilterType\x12\t\n\x05Regex\x10\x00\x12\n\n\x06Prefix\x10\x01\"\xde\x01\n\x15\x46ilteredReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x19\n\x11max_search_window\x18\x04 \x01(\x05\x12\x18\n\x10resolve_link_tos\x18\x05 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x06 \x02(\x08\x12\x32\n\x06\x66ilter\x18\x07 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\"\xb3\x03\n\x1e\x46ilteredReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x06 \x02(\x08\x12i\n\x06result\x18\x07 \x01(\x0e\x32P.EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x08 \x01(\t\"R\n\x15\x46ilteredReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xde\x03\n\x1c\x43reatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 \x01(\t\"X\n\x1c\x44\x65letePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\"\xde\x03\n\x1cUpdatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 
\x01(\t\"\x97\x02\n%UpdatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"_\n\"UpdatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x98\x02\n%CreatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"`\n\"CreatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x11\n\rAlreadyExists\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x97\x02\n%DeletePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"_\n\"DeletePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"w\n\x1f\x43onnectToPersistentSubscription\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\"\n\x1a\x61llowed_in_flight_messages\x18\x03 \x02(\x05\"W\n\x1fPersistentSubscriptionAckEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\"\x8b\x02\n\x1fPersistentSubscriptionNakEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\x12\x0f\n\x07message\x18\x03 \x01(\t\x12^\n\x06\x61\x63tion\x18\x04 \x02(\x0e\x32\x45.EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction:\x07Unknown\"A\n\tNakAction\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Park\x10\x01\x12\t\n\x05Retry\x10\x02\x12\x08\n\x04Skip\x10\x03\x12\x08\n\x04Stop\x10\x04\"v\n\"PersistentSubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x17\n\x0fsubscription_id\x18\x02 \x02(\t\x12\x19\n\x11last_event_number\x18\x03 \x01(\x03\"\x80\x01\n)PersistentSubscriptionStreamEventAppeared\x12?\n\x05\x65vent\x18\x01 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\x12\n\nretryCount\x18\x02 \x01(\x05\"F\n\x11SubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\"\x9f\x01\n\x19\x46ilteredSubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\x12\x32\n\x06\x66ilter\x18\x03 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\x12\x1b\n\x13\x63heckpoint_interval\x18\x04 \x02(\x05\"F\n\x11\x43heckpointReached\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\"S\n\x18SubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x19\n\x11last_event_number\x18\x02 \x01(\x03\"O\n\x13StreamEventAppeared\x12\x38\n\x05\x65vent\x18\x01 \x02(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\"\x17\n\x15UnsubscribeFromStream\"\x8a\x02\n\x13SubscriptionDropped\x12\x64\n\x06reason\x18\x01 
\x01(\x0e\x32\x46.EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason:\x0cUnsubscribed\"\x8c\x01\n\x16SubscriptionDropReason\x12\x10\n\x0cUnsubscribed\x10\x00\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x01\x12\x0c\n\x08NotFound\x10\x02\x12!\n\x1dPersistentSubscriptionDeleted\x10\x03\x12\x1d\n\x19SubscriberMaxCountReached\x10\x04\"\xf4\x02\n\nNotHandled\x12G\n\x06reason\x18\x01 \x02(\x0e\x32\x37.EventStore.Client.Messages.NotHandled.NotHandledReason\x12\x17\n\x0f\x61\x64\x64itional_info\x18\x02 \x01(\x0c\x1a\xb5\x01\n\nLeaderInfo\x12\x1c\n\x14\x65xternal_tcp_address\x18\x01 \x02(\t\x12\x19\n\x11\x65xternal_tcp_port\x18\x02 \x02(\x05\x12\x14\n\x0chttp_address\x18\x03 \x02(\t\x12\x11\n\thttp_port\x18\x04 \x02(\x05\x12#\n\x1b\x65xternal_secure_tcp_address\x18\x05 \x01(\t\x12 \n\x18\x65xternal_secure_tcp_port\x18\x06 \x01(\x05\"L\n\x10NotHandledReason\x12\x0c\n\x08NotReady\x10\x00\x12\x0b\n\x07TooBusy\x10\x01\x12\r\n\tNotLeader\x10\x02\x12\x0e\n\nIsReadOnly\x10\x03\"\x12\n\x10ScavengeDatabase\"\xc4\x01\n\x18ScavengeDatabaseResponse\x12S\n\x06result\x18\x01 \x02(\x0e\x32\x43.EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult\x12\x12\n\nscavengeId\x18\x02 \x01(\t\"?\n\x0eScavengeResult\x12\x0b\n\x07Started\x10\x00\x12\x0e\n\nInProgress\x10\x01\x12\x10\n\x0cUnauthorized\x10\x02\":\n\x0eIdentifyClient\x12\x0f\n\x07version\x18\x01 \x02(\x05\x12\x17\n\x0f\x63onnection_name\x18\x02 \x01(\t\"\x12\n\x10\x43lientIdentified*\xb0\x01\n\x0fOperationResult\x12\x0b\n\x07Success\x10\x00\x12\x12\n\x0ePrepareTimeout\x10\x01\x12\x11\n\rCommitTimeout\x10\x02\x12\x12\n\x0e\x46orwardTimeout\x10\x03\x12\x18\n\x14WrongExpectedVersion\x10\x04\x12\x11\n\rStreamDeleted\x10\x05\x12\x16\n\x12InvalidTransaction\x10\x06\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x07' ) _OPERATIONRESULT = _descriptor.EnumDescriptor( name='OperationResult', full_name='EventStore.Client.Messages.OperationResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PrepareTimeout', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CommitTimeout', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ForwardTimeout', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='WrongExpectedVersion', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='InvalidTransaction', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8630, serialized_end=8806, ) _sym_db.RegisterEnumDescriptor(_OPERATIONRESULT) OperationResult = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESULT) Success = 0 PrepareTimeout = 1 CommitTimeout = 2 ForwardTimeout = 3 
WrongExpectedVersion = 4 StreamDeleted = 5 InvalidTransaction = 6 AccessDenied = 7 _READEVENTCOMPLETED_READEVENTRESULT = _descriptor.EnumDescriptor( name='ReadEventResult', full_name='EventStore.Client.Messages.ReadEventCompleted.ReadEventResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotFound', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NoStream', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=2471, serialized_end=2577, ) _sym_db.RegisterEnumDescriptor(_READEVENTCOMPLETED_READEVENTRESULT) _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT = _descriptor.EnumDescriptor( name='ReadStreamResult', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NoStream', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3030, serialized_end=3140, ) _sym_db.RegisterEnumDescriptor(_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT) _READALLEVENTSCOMPLETED_READALLRESULT = _descriptor.EnumDescriptor( name='ReadAllResult', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, 
serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3584, serialized_end=3658, ) _sym_db.RegisterEnumDescriptor(_READALLEVENTSCOMPLETED_READALLRESULT) _FILTER_FILTERCONTEXT = _descriptor.EnumDescriptor( name='FilterContext', full_name='EventStore.Client.Messages.Filter.FilterContext', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='StreamId', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EventType', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3813, serialized_end=3857, ) _sym_db.RegisterEnumDescriptor(_FILTER_FILTERCONTEXT) _FILTER_FILTERTYPE = _descriptor.EnumDescriptor( name='FilterType', full_name='EventStore.Client.Messages.Filter.FilterType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Regex', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Prefix', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3859, serialized_end=3894, ) _sym_db.RegisterEnumDescriptor(_FILTER_FILTERTYPE) _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT = _descriptor.EnumDescriptor( name='FilteredReadAllResult', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=4475, serialized_end=4557, ) _sym_db.RegisterEnumDescriptor(_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT) _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='UpdatePersistentSubscriptionResult', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DoesNotExist', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, 
serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=5796, serialized_end=5891, ) _sym_db.RegisterEnumDescriptor(_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT) _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='CreatePersistentSubscriptionResult', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AlreadyExists', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6078, serialized_end=6174, ) _sym_db.RegisterEnumDescriptor(_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT) _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='DeletePersistentSubscriptionResult', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DoesNotExist', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6361, serialized_end=6456, ) _sym_db.RegisterEnumDescriptor(_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT) _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION = _descriptor.EnumDescriptor( name='NakAction', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Unknown', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Park', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Retry', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Skip', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Stop', index=4, number=4, serialized_options=None, type=None, 
create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6871, serialized_end=6936, ) _sym_db.RegisterEnumDescriptor(_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION) _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON = _descriptor.EnumDescriptor( name='SubscriptionDropReason', full_name='EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Unsubscribed', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotFound', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PersistentSubscriptionDeleted', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SubscriberMaxCountReached', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=7813, serialized_end=7953, ) _sym_db.RegisterEnumDescriptor(_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON) _NOTHANDLED_NOTHANDLEDREASON = _descriptor.EnumDescriptor( name='NotHandledReason', full_name='EventStore.Client.Messages.NotHandled.NotHandledReason', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='NotReady', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TooBusy', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotLeader', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='IsReadOnly', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8252, serialized_end=8328, ) _sym_db.RegisterEnumDescriptor(_NOTHANDLED_NOTHANDLEDREASON) _SCAVENGEDATABASERESPONSE_SCAVENGERESULT = _descriptor.EnumDescriptor( name='ScavengeResult', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Started', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='InProgress', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Unauthorized', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8484, serialized_end=8547, ) _sym_db.RegisterEnumDescriptor(_SCAVENGEDATABASERESPONSE_SCAVENGERESULT) _NEWEVENT = _descriptor.Descriptor( name='NewEvent', full_name='EventStore.Client.Messages.NewEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, 
fields=[ _descriptor.FieldDescriptor( name='event_id', full_name='EventStore.Client.Messages.NewEvent.event_id', index=0, number=1, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_type', full_name='EventStore.Client.Messages.NewEvent.event_type', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data_content_type', full_name='EventStore.Client.Messages.NewEvent.data_content_type', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata_content_type', full_name='EventStore.Client.Messages.NewEvent.metadata_content_type', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.NewEvent.data', index=4, number=5, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='EventStore.Client.Messages.NewEvent.metadata', index=5, number=6, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=56, serialized_end=194, ) _EVENTRECORD = _descriptor.Descriptor( name='EventRecord', full_name='EventStore.Client.Messages.EventRecord', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.EventRecord.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_number', full_name='EventStore.Client.Messages.EventRecord.event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_id', full_name='EventStore.Client.Messages.EventRecord.event_id', index=2, number=3, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_type', full_name='EventStore.Client.Messages.EventRecord.event_type', index=3, number=4, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data_content_type', full_name='EventStore.Client.Messages.EventRecord.data_content_type', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata_content_type', full_name='EventStore.Client.Messages.EventRecord.metadata_content_type', index=5, number=6, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.EventRecord.data', index=6, number=7, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='EventStore.Client.Messages.EventRecord.metadata', index=7, number=8, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created', full_name='EventStore.Client.Messages.EventRecord.created', index=8, number=9, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created_epoch', full_name='EventStore.Client.Messages.EventRecord.created_epoch', index=9, number=10, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=197, serialized_end=425, ) _RESOLVEDINDEXEDEVENT = _descriptor.Descriptor( name='ResolvedIndexedEvent', full_name='EventStore.Client.Messages.ResolvedIndexedEvent', filename=None, 
file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='link', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.link', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=428, serialized_end=561, ) _RESOLVEDEVENT = _descriptor.Descriptor( name='ResolvedEvent', full_name='EventStore.Client.Messages.ResolvedEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ResolvedEvent.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='link', full_name='EventStore.Client.Messages.ResolvedEvent.link', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.ResolvedEvent.commit_position', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ResolvedEvent.prepare_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=564, serialized_end=741, ) _WRITEEVENTS = _descriptor.Descriptor( name='WriteEvents', full_name='EventStore.Client.Messages.WriteEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.WriteEvents.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.WriteEvents.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.WriteEvents.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.WriteEvents.require_leader', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=744, serialized_end=886, ) _WRITEEVENTSCOMPLETED = _descriptor.Descriptor( name='WriteEventsCompleted', full_name='EventStore.Client.Messages.WriteEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.WriteEventsCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.WriteEventsCompleted.message', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='first_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.first_event_number', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.last_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.prepare_position', index=4, number=5, type=3, 
cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.commit_position', index=5, number=6, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='current_version', full_name='EventStore.Client.Messages.WriteEventsCompleted.current_version', index=6, number=7, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=889, serialized_end=1120, ) _DELETESTREAM = _descriptor.Descriptor( name='DeleteStream', full_name='EventStore.Client.Messages.DeleteStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.DeleteStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.DeleteStream.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.DeleteStream.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='hard_delete', full_name='EventStore.Client.Messages.DeleteStream.hard_delete', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1122, serialized_end=1232, ) _DELETESTREAMCOMPLETED = _descriptor.Descriptor( name='DeleteStreamCompleted', full_name='EventStore.Client.Messages.DeleteStreamCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ 
_descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.DeleteStreamCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.DeleteStreamCompleted.message', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.prepare_position', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.commit_position', index=3, number=4, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1235, serialized_end=1387, ) _TRANSACTIONSTART = _descriptor.Descriptor( name='TransactionStart', full_name='EventStore.Client.Messages.TransactionStart', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.TransactionStart.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.TransactionStart.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionStart.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1389, serialized_end=1482, ) _TRANSACTIONSTARTCOMPLETED = _descriptor.Descriptor( 
name='TransactionStartCompleted', full_name='EventStore.Client.Messages.TransactionStartCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionStartCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.TransactionStartCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionStartCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1485, serialized_end=1614, ) _TRANSACTIONWRITE = _descriptor.Descriptor( name='TransactionWrite', full_name='EventStore.Client.Messages.TransactionWrite', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionWrite.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.TransactionWrite.events', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionWrite.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1616, serialized_end=1736, ) _TRANSACTIONWRITECOMPLETED = _descriptor.Descriptor( name='TransactionWriteCompleted', full_name='EventStore.Client.Messages.TransactionWriteCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( 
name='transaction_id', full_name='EventStore.Client.Messages.TransactionWriteCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.TransactionWriteCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionWriteCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1739, serialized_end=1868, ) _TRANSACTIONCOMMIT = _descriptor.Descriptor( name='TransactionCommit', full_name='EventStore.Client.Messages.TransactionCommit', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommit.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionCommit.require_leader', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1870, serialized_end=1937, ) _TRANSACTIONCOMMITCOMPLETED = _descriptor.Descriptor( name='TransactionCommitCompleted', full_name='EventStore.Client.Messages.TransactionCommitCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommitCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.TransactionCommitCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionCommitCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='first_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.first_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.last_event_number', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.prepare_position', index=5, number=6, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.commit_position', index=6, number=7, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1940, serialized_end=2176, ) _READEVENT = _descriptor.Descriptor( name='ReadEvent', full_name='EventStore.Client.Messages.ReadEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ReadEvent.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_number', full_name='EventStore.Client.Messages.ReadEvent.event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', 
full_name='EventStore.Client.Messages.ReadEvent.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadEvent.require_leader', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2178, serialized_end=2286, ) _READEVENTCOMPLETED = _descriptor.Descriptor( name='ReadEventCompleted', full_name='EventStore.Client.Messages.ReadEventCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadEventCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ReadEventCompleted.event', index=1, number=2, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadEventCompleted.error', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READEVENTCOMPLETED_READEVENTRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2289, serialized_end=2577, ) _READSTREAMEVENTS = _descriptor.Descriptor( name='ReadStreamEvents', full_name='EventStore.Client.Messages.ReadStreamEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ReadStreamEvents.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='from_event_number', full_name='EventStore.Client.Messages.ReadStreamEvents.from_event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.ReadStreamEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadStreamEvents.resolve_link_tos', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadStreamEvents.require_leader', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2580, serialized_end=2719, ) _READSTREAMEVENTSCOMPLETED = _descriptor.Descriptor( name='ReadStreamEventsCompleted', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.events', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.next_event_number', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='is_end_of_stream', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.is_end_of_stream', index=4, number=5, 
type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_commit_position', index=5, number=6, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.error', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2722, serialized_end=3140, ) _READALLEVENTS = _descriptor.Descriptor( name='ReadAllEvents', full_name='EventStore.Client.Messages.ReadAllEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.ReadAllEvents.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEvents.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.ReadAllEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadAllEvents.resolve_link_tos', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadAllEvents.require_leader', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3143, serialized_end=3278, ) _READALLEVENTSCOMPLETED = _descriptor.Descriptor( name='ReadAllEventsCompleted', full_name='EventStore.Client.Messages.ReadAllEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_commit_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_prepare_position', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.result', index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.error', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READALLEVENTSCOMPLETED_READALLRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3281, 
serialized_end=3658, ) _FILTER = _descriptor.Descriptor( name='Filter', full_name='EventStore.Client.Messages.Filter', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='context', full_name='EventStore.Client.Messages.Filter.context', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='EventStore.Client.Messages.Filter.type', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.Filter.data', index=2, number=3, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _FILTER_FILTERCONTEXT, _FILTER_FILTERTYPE, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3661, serialized_end=3894, ) _FILTEREDREADALLEVENTS = _descriptor.Descriptor( name='FilteredReadAllEvents', full_name='EventStore.Client.Messages.FilteredReadAllEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_search_window', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_search_window', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', 
full_name='EventStore.Client.Messages.FilteredReadAllEvents.resolve_link_tos', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.FilteredReadAllEvents.require_leader', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='filter', full_name='EventStore.Client.Messages.FilteredReadAllEvents.filter', index=6, number=7, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3897, serialized_end=4119, ) _FILTEREDREADALLEVENTSCOMPLETED = _descriptor.Descriptor( name='FilteredReadAllEventsCompleted', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_commit_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_prepare_position', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='is_end_of_stream', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.is_end_of_stream', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.result', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.error', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4122, serialized_end=4557, ) _CREATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='CreatePersistentSubscription', full_name='EventStore.Client.Messages.CreatePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.CreatePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.CreatePersistentSubscription.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='start_from', full_name='EventStore.Client.Messages.CreatePersistentSubscription.start_from', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.CreatePersistentSubscription.message_timeout_milliseconds', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='record_statistics', full_name='EventStore.Client.Messages.CreatePersistentSubscription.record_statistics', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='live_buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.live_buffer_size', index=6, number=7, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='read_batch_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.read_batch_size', index=7, number=8, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.buffer_size', index=8, number=9, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_retry_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.max_retry_count', index=9, number=10, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prefer_round_robin', full_name='EventStore.Client.Messages.CreatePersistentSubscription.prefer_round_robin', index=10, number=11, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_after_time', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_after_time', index=11, number=12, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_max_count', 
full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_max_count', index=12, number=13, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_min_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_min_count', index=13, number=14, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscriber_max_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscriber_max_count', index=14, number=15, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='named_consumer_strategy', full_name='EventStore.Client.Messages.CreatePersistentSubscription.named_consumer_strategy', index=15, number=16, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4560, serialized_end=5038, ) _DELETEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='DeletePersistentSubscription', full_name='EventStore.Client.Messages.DeletePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', full_name='EventStore.Client.Messages.DeletePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.DeletePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5040, serialized_end=5128, ) _UPDATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='UpdatePersistentSubscription', full_name='EventStore.Client.Messages.UpdatePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', 
full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='start_from', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.start_from', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.message_timeout_milliseconds', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='record_statistics', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.record_statistics', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='live_buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.live_buffer_size', index=6, number=7, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='read_batch_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.read_batch_size', index=7, number=8, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.buffer_size', index=8, number=9, type=5, cpp_type=1, label=2, has_default_value=False, 
default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_retry_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.max_retry_count', index=9, number=10, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prefer_round_robin', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.prefer_round_robin', index=10, number=11, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_after_time', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_after_time', index=11, number=12, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_max_count', index=12, number=13, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_min_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_min_count', index=13, number=14, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscriber_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscriber_max_count', index=14, number=15, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='named_consumer_strategy', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.named_consumer_strategy', index=15, number=16, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5131, serialized_end=5609, ) _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='UpdatePersistentSubscriptionCompleted', 
full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5612, serialized_end=5891, ) _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='CreatePersistentSubscriptionCompleted', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5894, serialized_end=6174, ) _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='DeletePersistentSubscriptionCompleted', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, 
label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6177, serialized_end=6456, ) _CONNECTTOPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='ConnectToPersistentSubscription', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='allowed_in_flight_messages', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.allowed_in_flight_messages', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6458, serialized_end=6577, ) _PERSISTENTSUBSCRIPTIONACKEVENTS = _descriptor.Descriptor( name='PersistentSubscriptionAckEvents', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.processed_event_ids', index=1, number=2, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, 
syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6579, serialized_end=6666, ) _PERSISTENTSUBSCRIPTIONNAKEVENTS = _descriptor.Descriptor( name='PersistentSubscriptionNakEvents', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.processed_event_ids', index=1, number=2, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='action', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.action', index=3, number=4, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6669, serialized_end=6936, ) _PERSISTENTSUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor( name='PersistentSubscriptionConfirmation', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.subscription_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', 
full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_event_number', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6938, serialized_end=7056, ) _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED = _descriptor.Descriptor( name='PersistentSubscriptionStreamEventAppeared', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='retryCount', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.retryCount', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7059, serialized_end=7187, ) _SUBSCRIBETOSTREAM = _descriptor.Descriptor( name='SubscribeToStream', full_name='EventStore.Client.Messages.SubscribeToStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.SubscribeToStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.SubscribeToStream.resolve_link_tos', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7189, serialized_end=7259, ) _FILTEREDSUBSCRIBETOSTREAM = _descriptor.Descriptor( name='FilteredSubscribeToStream', full_name='EventStore.Client.Messages.FilteredSubscribeToStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', 
full_name='EventStore.Client.Messages.FilteredSubscribeToStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.resolve_link_tos', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='filter', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.filter', index=2, number=3, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_interval', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.checkpoint_interval', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7262, serialized_end=7421, ) _CHECKPOINTREACHED = _descriptor.Descriptor( name='CheckpointReached', full_name='EventStore.Client.Messages.CheckpointReached', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.CheckpointReached.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.CheckpointReached.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7423, serialized_end=7493, ) _SUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor( name='SubscriptionConfirmation', full_name='EventStore.Client.Messages.SubscriptionConfirmation', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_event_number', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7495, serialized_end=7578, ) _STREAMEVENTAPPEARED = _descriptor.Descriptor( name='StreamEventAppeared', full_name='EventStore.Client.Messages.StreamEventAppeared', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.StreamEventAppeared.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7580, serialized_end=7659, ) _UNSUBSCRIBEFROMSTREAM = _descriptor.Descriptor( name='UnsubscribeFromStream', full_name='EventStore.Client.Messages.UnsubscribeFromStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7661, serialized_end=7684, ) _SUBSCRIPTIONDROPPED = _descriptor.Descriptor( name='SubscriptionDropped', full_name='EventStore.Client.Messages.SubscriptionDropped', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.SubscriptionDropped.reason', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7687, serialized_end=7953, ) _NOTHANDLED_LEADERINFO = _descriptor.Descriptor( name='LeaderInfo', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='external_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_address', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_port', index=1, number=2, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='http_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_address', index=2, number=3, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='http_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_port', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_secure_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_address', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_secure_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_port', index=5, number=6, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8069, serialized_end=8250, ) _NOTHANDLED = _descriptor.Descriptor( name='NotHandled', full_name='EventStore.Client.Messages.NotHandled', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.NotHandled.reason', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='additional_info', full_name='EventStore.Client.Messages.NotHandled.additional_info', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[_NOTHANDLED_LEADERINFO, ], enum_types=[ _NOTHANDLED_NOTHANDLEDREASON, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], 
oneofs=[ ], serialized_start=7956, serialized_end=8328, ) _SCAVENGEDATABASE = _descriptor.Descriptor( name='ScavengeDatabase', full_name='EventStore.Client.Messages.ScavengeDatabase', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8330, serialized_end=8348, ) _SCAVENGEDATABASERESPONSE = _descriptor.Descriptor( name='ScavengeDatabaseResponse', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='scavengeId', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.scavengeId', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _SCAVENGEDATABASERESPONSE_SCAVENGERESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8351, serialized_end=8547, ) _IDENTIFYCLIENT = _descriptor.Descriptor( name='IdentifyClient', full_name='EventStore.Client.Messages.IdentifyClient', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='version', full_name='EventStore.Client.Messages.IdentifyClient.version', index=0, number=1, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='connection_name', full_name='EventStore.Client.Messages.IdentifyClient.connection_name', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8549, serialized_end=8607, ) _CLIENTIDENTIFIED = _descriptor.Descriptor( name='ClientIdentified', full_name='EventStore.Client.Messages.ClientIdentified', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8609, serialized_end=8627, ) _RESOLVEDINDEXEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD 
_RESOLVEDINDEXEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD _RESOLVEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD _RESOLVEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD _WRITEEVENTS.fields_by_name['events'].message_type = _NEWEVENT _WRITEEVENTSCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _DELETESTREAMCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _TRANSACTIONSTARTCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _TRANSACTIONWRITE.fields_by_name['events'].message_type = _NEWEVENT _TRANSACTIONWRITECOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _TRANSACTIONCOMMITCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _READEVENTCOMPLETED.fields_by_name['result'].enum_type = _READEVENTCOMPLETED_READEVENTRESULT _READEVENTCOMPLETED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT _READEVENTCOMPLETED_READEVENTRESULT.containing_type = _READEVENTCOMPLETED _READSTREAMEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDINDEXEDEVENT _READSTREAMEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT.containing_type = _READSTREAMEVENTSCOMPLETED _READALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT _READALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READALLEVENTSCOMPLETED_READALLRESULT _READALLEVENTSCOMPLETED_READALLRESULT.containing_type = _READALLEVENTSCOMPLETED _FILTER.fields_by_name['context'].enum_type = _FILTER_FILTERCONTEXT _FILTER.fields_by_name['type'].enum_type = _FILTER_FILTERTYPE _FILTER_FILTERCONTEXT.containing_type = _FILTER _FILTER_FILTERTYPE.containing_type = _FILTER _FILTEREDREADALLEVENTS.fields_by_name['filter'].message_type = _FILTER _FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT _FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT.containing_type = _FILTEREDREADALLEVENTSCOMPLETED _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED _PERSISTENTSUBSCRIPTIONNAKEVENTS.fields_by_name['action'].enum_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION.containing_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT _FILTEREDSUBSCRIBETOSTREAM.fields_by_name['filter'].message_type = _FILTER _STREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDEVENT _SUBSCRIPTIONDROPPED.fields_by_name['reason'].enum_type = _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON 
_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON.containing_type = _SUBSCRIPTIONDROPPED _NOTHANDLED_LEADERINFO.containing_type = _NOTHANDLED _NOTHANDLED.fields_by_name['reason'].enum_type = _NOTHANDLED_NOTHANDLEDREASON _NOTHANDLED_NOTHANDLEDREASON.containing_type = _NOTHANDLED _SCAVENGEDATABASERESPONSE.fields_by_name['result'].enum_type = _SCAVENGEDATABASERESPONSE_SCAVENGERESULT _SCAVENGEDATABASERESPONSE_SCAVENGERESULT.containing_type = _SCAVENGEDATABASERESPONSE DESCRIPTOR.message_types_by_name['NewEvent'] = _NEWEVENT DESCRIPTOR.message_types_by_name['EventRecord'] = _EVENTRECORD DESCRIPTOR.message_types_by_name['ResolvedIndexedEvent'] = _RESOLVEDINDEXEDEVENT DESCRIPTOR.message_types_by_name['ResolvedEvent'] = _RESOLVEDEVENT DESCRIPTOR.message_types_by_name['WriteEvents'] = _WRITEEVENTS DESCRIPTOR.message_types_by_name['WriteEventsCompleted'] = _WRITEEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['DeleteStream'] = _DELETESTREAM DESCRIPTOR.message_types_by_name['DeleteStreamCompleted'] = _DELETESTREAMCOMPLETED DESCRIPTOR.message_types_by_name['TransactionStart'] = _TRANSACTIONSTART DESCRIPTOR.message_types_by_name['TransactionStartCompleted'] = _TRANSACTIONSTARTCOMPLETED DESCRIPTOR.message_types_by_name['TransactionWrite'] = _TRANSACTIONWRITE DESCRIPTOR.message_types_by_name['TransactionWriteCompleted'] = _TRANSACTIONWRITECOMPLETED DESCRIPTOR.message_types_by_name['TransactionCommit'] = _TRANSACTIONCOMMIT DESCRIPTOR.message_types_by_name['TransactionCommitCompleted'] = _TRANSACTIONCOMMITCOMPLETED DESCRIPTOR.message_types_by_name['ReadEvent'] = _READEVENT DESCRIPTOR.message_types_by_name['ReadEventCompleted'] = _READEVENTCOMPLETED DESCRIPTOR.message_types_by_name['ReadStreamEvents'] = _READSTREAMEVENTS DESCRIPTOR.message_types_by_name['ReadStreamEventsCompleted'] = _READSTREAMEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['ReadAllEvents'] = _READALLEVENTS DESCRIPTOR.message_types_by_name['ReadAllEventsCompleted'] = _READALLEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['Filter'] = _FILTER DESCRIPTOR.message_types_by_name['FilteredReadAllEvents'] = _FILTEREDREADALLEVENTS DESCRIPTOR.message_types_by_name['FilteredReadAllEventsCompleted'] = _FILTEREDREADALLEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['CreatePersistentSubscription'] = _CREATEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['DeletePersistentSubscription'] = _DELETEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['UpdatePersistentSubscription'] = _UPDATEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['UpdatePersistentSubscriptionCompleted'] = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['CreatePersistentSubscriptionCompleted'] = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['DeletePersistentSubscriptionCompleted'] = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['ConnectToPersistentSubscription'] = _CONNECTTOPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['PersistentSubscriptionAckEvents'] = _PERSISTENTSUBSCRIPTIONACKEVENTS DESCRIPTOR.message_types_by_name['PersistentSubscriptionNakEvents'] = _PERSISTENTSUBSCRIPTIONNAKEVENTS DESCRIPTOR.message_types_by_name['PersistentSubscriptionConfirmation'] = _PERSISTENTSUBSCRIPTIONCONFIRMATION DESCRIPTOR.message_types_by_name['PersistentSubscriptionStreamEventAppeared'] = _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED DESCRIPTOR.message_types_by_name['SubscribeToStream'] = _SUBSCRIBETOSTREAM DESCRIPTOR.message_types_by_name['FilteredSubscribeToStream'] = _FILTEREDSUBSCRIBETOSTREAM 
DESCRIPTOR.message_types_by_name['CheckpointReached'] = _CHECKPOINTREACHED DESCRIPTOR.message_types_by_name['SubscriptionConfirmation'] = _SUBSCRIPTIONCONFIRMATION DESCRIPTOR.message_types_by_name['StreamEventAppeared'] = _STREAMEVENTAPPEARED DESCRIPTOR.message_types_by_name['UnsubscribeFromStream'] = _UNSUBSCRIBEFROMSTREAM DESCRIPTOR.message_types_by_name['SubscriptionDropped'] = _SUBSCRIPTIONDROPPED DESCRIPTOR.message_types_by_name['NotHandled'] = _NOTHANDLED DESCRIPTOR.message_types_by_name['ScavengeDatabase'] = _SCAVENGEDATABASE DESCRIPTOR.message_types_by_name['ScavengeDatabaseResponse'] = _SCAVENGEDATABASERESPONSE DESCRIPTOR.message_types_by_name['IdentifyClient'] = _IDENTIFYCLIENT DESCRIPTOR.message_types_by_name['ClientIdentified'] = _CLIENTIDENTIFIED DESCRIPTOR.enum_types_by_name['OperationResult'] = _OPERATIONRESULT _sym_db.RegisterFileDescriptor(DESCRIPTOR) NewEvent = _reflection.GeneratedProtocolMessageType('NewEvent', (_message.Message,), { 'DESCRIPTOR' : _NEWEVENT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NewEvent) }) _sym_db.RegisterMessage(NewEvent) EventRecord = _reflection.GeneratedProtocolMessageType('EventRecord', (_message.Message,), { 'DESCRIPTOR' : _EVENTRECORD, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.EventRecord) }) _sym_db.RegisterMessage(EventRecord) ResolvedIndexedEvent = _reflection.GeneratedProtocolMessageType('ResolvedIndexedEvent', (_message.Message,), { 'DESCRIPTOR' : _RESOLVEDINDEXEDEVENT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ResolvedIndexedEvent) }) _sym_db.RegisterMessage(ResolvedIndexedEvent) ResolvedEvent = _reflection.GeneratedProtocolMessageType('ResolvedEvent', (_message.Message,), { 'DESCRIPTOR' : _RESOLVEDEVENT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ResolvedEvent) }) _sym_db.RegisterMessage(ResolvedEvent) WriteEvents = _reflection.GeneratedProtocolMessageType('WriteEvents', (_message.Message,), { 'DESCRIPTOR' : _WRITEEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.WriteEvents) }) _sym_db.RegisterMessage(WriteEvents) WriteEventsCompleted = _reflection.GeneratedProtocolMessageType('WriteEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _WRITEEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.WriteEventsCompleted) }) _sym_db.RegisterMessage(WriteEventsCompleted) DeleteStream = _reflection.GeneratedProtocolMessageType('DeleteStream', (_message.Message,), { 'DESCRIPTOR' : _DELETESTREAM, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeleteStream) }) _sym_db.RegisterMessage(DeleteStream) DeleteStreamCompleted = _reflection.GeneratedProtocolMessageType('DeleteStreamCompleted', (_message.Message,), { 'DESCRIPTOR' : _DELETESTREAMCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeleteStreamCompleted) }) _sym_db.RegisterMessage(DeleteStreamCompleted) TransactionStart = _reflection.GeneratedProtocolMessageType('TransactionStart', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONSTART, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionStart) }) 
_sym_db.RegisterMessage(TransactionStart) TransactionStartCompleted = _reflection.GeneratedProtocolMessageType('TransactionStartCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONSTARTCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionStartCompleted) }) _sym_db.RegisterMessage(TransactionStartCompleted) TransactionWrite = _reflection.GeneratedProtocolMessageType('TransactionWrite', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONWRITE, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionWrite) }) _sym_db.RegisterMessage(TransactionWrite) TransactionWriteCompleted = _reflection.GeneratedProtocolMessageType('TransactionWriteCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONWRITECOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionWriteCompleted) }) _sym_db.RegisterMessage(TransactionWriteCompleted) TransactionCommit = _reflection.GeneratedProtocolMessageType('TransactionCommit', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONCOMMIT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionCommit) }) _sym_db.RegisterMessage(TransactionCommit) TransactionCommitCompleted = _reflection.GeneratedProtocolMessageType('TransactionCommitCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONCOMMITCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.TransactionCommitCompleted) }) _sym_db.RegisterMessage(TransactionCommitCompleted) ReadEvent = _reflection.GeneratedProtocolMessageType('ReadEvent', (_message.Message,), { 'DESCRIPTOR' : _READEVENT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadEvent) }) _sym_db.RegisterMessage(ReadEvent) ReadEventCompleted = _reflection.GeneratedProtocolMessageType('ReadEventCompleted', (_message.Message,), { 'DESCRIPTOR' : _READEVENTCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadEventCompleted) }) _sym_db.RegisterMessage(ReadEventCompleted) ReadStreamEvents = _reflection.GeneratedProtocolMessageType('ReadStreamEvents', (_message.Message,), { 'DESCRIPTOR' : _READSTREAMEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadStreamEvents) }) _sym_db.RegisterMessage(ReadStreamEvents) ReadStreamEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadStreamEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _READSTREAMEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadStreamEventsCompleted) }) _sym_db.RegisterMessage(ReadStreamEventsCompleted) ReadAllEvents = _reflection.GeneratedProtocolMessageType('ReadAllEvents', (_message.Message,), { 'DESCRIPTOR' : _READALLEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadAllEvents) }) _sym_db.RegisterMessage(ReadAllEvents) ReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadAllEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _READALLEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ReadAllEventsCompleted) }) 
_sym_db.RegisterMessage(ReadAllEventsCompleted) Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), { 'DESCRIPTOR' : _FILTER, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.Filter) }) _sym_db.RegisterMessage(Filter) FilteredReadAllEvents = _reflection.GeneratedProtocolMessageType('FilteredReadAllEvents', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDREADALLEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredReadAllEvents) }) _sym_db.RegisterMessage(FilteredReadAllEvents) FilteredReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('FilteredReadAllEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDREADALLEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredReadAllEventsCompleted) }) _sym_db.RegisterMessage(FilteredReadAllEventsCompleted) CreatePersistentSubscription = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CreatePersistentSubscription) }) _sym_db.RegisterMessage(CreatePersistentSubscription) DeletePersistentSubscription = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeletePersistentSubscription) }) _sym_db.RegisterMessage(DeletePersistentSubscription) UpdatePersistentSubscription = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UpdatePersistentSubscription) }) _sym_db.RegisterMessage(UpdatePersistentSubscription) UpdatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted) }) _sym_db.RegisterMessage(UpdatePersistentSubscriptionCompleted) CreatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CreatePersistentSubscriptionCompleted) }) _sym_db.RegisterMessage(CreatePersistentSubscriptionCompleted) DeletePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.DeletePersistentSubscriptionCompleted) }) _sym_db.RegisterMessage(DeletePersistentSubscriptionCompleted) ConnectToPersistentSubscription = _reflection.GeneratedProtocolMessageType('ConnectToPersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _CONNECTTOPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' 
# @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ConnectToPersistentSubscription) }) _sym_db.RegisterMessage(ConnectToPersistentSubscription) PersistentSubscriptionAckEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionAckEvents', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONACKEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionAckEvents) }) _sym_db.RegisterMessage(PersistentSubscriptionAckEvents) PersistentSubscriptionNakEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionNakEvents', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONNAKEVENTS, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionNakEvents) }) _sym_db.RegisterMessage(PersistentSubscriptionNakEvents) PersistentSubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionConfirmation', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONCONFIRMATION, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionConfirmation) }) _sym_db.RegisterMessage(PersistentSubscriptionConfirmation) PersistentSubscriptionStreamEventAppeared = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionStreamEventAppeared', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared) }) _sym_db.RegisterMessage(PersistentSubscriptionStreamEventAppeared) SubscribeToStream = _reflection.GeneratedProtocolMessageType('SubscribeToStream', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIBETOSTREAM, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscribeToStream) }) _sym_db.RegisterMessage(SubscribeToStream) FilteredSubscribeToStream = _reflection.GeneratedProtocolMessageType('FilteredSubscribeToStream', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDSUBSCRIBETOSTREAM, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.FilteredSubscribeToStream) }) _sym_db.RegisterMessage(FilteredSubscribeToStream) CheckpointReached = _reflection.GeneratedProtocolMessageType('CheckpointReached', (_message.Message,), { 'DESCRIPTOR' : _CHECKPOINTREACHED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.CheckpointReached) }) _sym_db.RegisterMessage(CheckpointReached) SubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('SubscriptionConfirmation', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIPTIONCONFIRMATION, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscriptionConfirmation) }) _sym_db.RegisterMessage(SubscriptionConfirmation) StreamEventAppeared = _reflection.GeneratedProtocolMessageType('StreamEventAppeared', (_message.Message,), { 'DESCRIPTOR' : _STREAMEVENTAPPEARED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.StreamEventAppeared) }) _sym_db.RegisterMessage(StreamEventAppeared) UnsubscribeFromStream = _reflection.GeneratedProtocolMessageType('UnsubscribeFromStream', (_message.Message,), { 'DESCRIPTOR' : _UNSUBSCRIBEFROMSTREAM, '__module__' : 
'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.UnsubscribeFromStream) }) _sym_db.RegisterMessage(UnsubscribeFromStream) SubscriptionDropped = _reflection.GeneratedProtocolMessageType('SubscriptionDropped', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIPTIONDROPPED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.SubscriptionDropped) }) _sym_db.RegisterMessage(SubscriptionDropped) NotHandled = _reflection.GeneratedProtocolMessageType('NotHandled', (_message.Message,), { 'LeaderInfo' : _reflection.GeneratedProtocolMessageType('LeaderInfo', (_message.Message,), { 'DESCRIPTOR' : _NOTHANDLED_LEADERINFO, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NotHandled.LeaderInfo) }) , 'DESCRIPTOR' : _NOTHANDLED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.NotHandled) }) _sym_db.RegisterMessage(NotHandled) _sym_db.RegisterMessage(NotHandled.LeaderInfo) ScavengeDatabase = _reflection.GeneratedProtocolMessageType('ScavengeDatabase', (_message.Message,), { 'DESCRIPTOR' : _SCAVENGEDATABASE, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ScavengeDatabase) }) _sym_db.RegisterMessage(ScavengeDatabase) ScavengeDatabaseResponse = _reflection.GeneratedProtocolMessageType('ScavengeDatabaseResponse', (_message.Message,), { 'DESCRIPTOR' : _SCAVENGEDATABASERESPONSE, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ScavengeDatabaseResponse) }) _sym_db.RegisterMessage(ScavengeDatabaseResponse) IdentifyClient = _reflection.GeneratedProtocolMessageType('IdentifyClient', (_message.Message,), { 'DESCRIPTOR' : _IDENTIFYCLIENT, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.IdentifyClient) }) _sym_db.RegisterMessage(IdentifyClient) ClientIdentified = _reflection.GeneratedProtocolMessageType('ClientIdentified', (_message.Message,), { 'DESCRIPTOR' : _CLIENTIDENTIFIED, '__module__' : 'ClientMessageDtos_pb2' # @@protoc_insertion_point(class_scope:EventStore.Client.Messages.ClientIdentified) }) _sym_db.RegisterMessage(ClientIdentified) # @@protoc_insertion_point(module_scope)
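The classes registered above are ordinary protobuf messages, so once this generated module is importable they can be built, serialized and parsed with the standard protobuf runtime API. The following is a minimal usage sketch, not part of the generated file; it assumes the module can be imported as `ClientMessageDtos_pb2` (matching the `'__module__'` value used above) — in a packaged EventStore client the import path may differ.

# --- usage sketch (illustrative, assumes ClientMessageDtos_pb2 is importable) ---
import ClientMessageDtos_pb2 as dtos

def build_update_persistent_subscription():
    # UpdatePersistentSubscription is a proto2 message, so every required
    # (label=2) field must be set before serialization succeeds.
    return dtos.UpdatePersistentSubscription(
        subscription_group_name='my-group',          # example values, not defaults
        event_stream_id='my-stream',
        resolve_link_tos=False,
        start_from=0,
        message_timeout_milliseconds=10000,
        record_statistics=False,
        live_buffer_size=500,
        read_batch_size=20,
        buffer_size=500,
        max_retry_count=10,
        prefer_round_robin=True,
        checkpoint_after_time=2000,
        checkpoint_max_count=1000,
        checkpoint_min_count=10,
        subscriber_max_count=0,
        # named_consumer_strategy is optional (label=1) and may be left unset.
    )

if __name__ == '__main__':
    update = build_update_persistent_subscription()
    payload = update.SerializeToString()              # wire-format bytes

    # Round-trip: parse the bytes back into a fresh message instance.
    parsed = dtos.UpdatePersistentSubscription()
    parsed.ParseFromString(payload)
    assert parsed.subscription_group_name == 'my-group'

    # The file descriptor built above can also be queried by message name.
    desc = dtos.DESCRIPTOR.message_types_by_name['UpdatePersistentSubscription']
    print(desc.full_name, [f.name for f in desc.fields][:3])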
47.671826
14798
0.775046
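The three bare values above read as per-file statistics for the preceding source text. Assuming they are an average line length, a maximum line length, and an alphanumeric-character fraction (an inference about this dump, not something stated in it), a minimal sketch of how such numbers could be derived:

# --- illustrative helper; key names are assumptions, not taken from this dump ---
def text_stats(source: str) -> dict:
    lines = source.splitlines() or ['']
    return {
        'avg_line_length': sum(len(l) for l in lines) / len(lines),
        'max_line_length': max(len(l) for l in lines),
        'alphanum_fraction': (
            sum(c.isalnum() for c in source) / len(source) if source else 0.0
        ),
    }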
from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='ClientMessageDtos.proto', package='EventStore.Client.Messages', syntax='proto2', serialized_options=None, create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x17\x43lientMessageDtos.proto\x12\x1a\x45ventStore.Client.Messages\"\x8a\x01\n\x08NewEvent\x12\x10\n\x08\x65vent_id\x18\x01 \x02(\x0c\x12\x12\n\nevent_type\x18\x02 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x03 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x04 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x05 \x02(\x0c\x12\x10\n\x08metadata\x18\x06 \x01(\x0c\"\xe4\x01\n\x0b\x45ventRecord\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x10\n\x08\x65vent_id\x18\x03 \x02(\x0c\x12\x12\n\nevent_type\x18\x04 \x02(\t\x12\x19\n\x11\x64\x61ta_content_type\x18\x05 \x02(\x05\x12\x1d\n\x15metadata_content_type\x18\x06 \x02(\x05\x12\x0c\n\x04\x64\x61ta\x18\x07 \x02(\x0c\x12\x10\n\x08metadata\x18\x08 \x01(\x0c\x12\x0f\n\x07\x63reated\x18\t \x01(\x03\x12\x15\n\rcreated_epoch\x18\n \x01(\x03\"\x85\x01\n\x14ResolvedIndexedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\"\xb1\x01\n\rResolvedEvent\x12\x36\n\x05\x65vent\x18\x01 \x02(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x35\n\x04link\x18\x02 \x01(\x0b\x32\'.EventStore.Client.Messages.EventRecord\x12\x17\n\x0f\x63ommit_position\x18\x03 \x02(\x03\x12\x18\n\x10prepare_position\x18\x04 \x02(\x03\"\x8e\x01\n\x0bWriteEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x34\n\x06\x65vents\x18\x03 \x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xe7\x01\n\x14WriteEventsCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10prepare_position\x18\x05 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63urrent_version\x18\x07 \x01(\x03\"n\n\x0c\x44\x65leteStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\x12\x13\n\x0bhard_delete\x18\x04 \x01(\x08\"\x98\x01\n\x15\x44\x65leteStreamCompleted\x12;\n\x06result\x18\x01 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x18\n\x10prepare_position\x18\x03 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x04 \x01(\x03\"]\n\x10TransactionStart\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10\x65xpected_version\x18\x02 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionStartCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"x\n\x10TransactionWrite\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x34\n\x06\x65vents\x18\x02 
\x03(\x0b\x32$.EventStore.Client.Messages.NewEvent\x12\x16\n\x0erequire_leader\x18\x03 \x02(\x08\"\x81\x01\n\x19TransactionWriteCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\"C\n\x11TransactionCommit\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12\x16\n\x0erequire_leader\x18\x02 \x02(\x08\"\xec\x01\n\x1aTransactionCommitCompleted\x12\x16\n\x0etransaction_id\x18\x01 \x02(\x03\x12;\n\x06result\x18\x02 \x02(\x0e\x32+.EventStore.Client.Messages.OperationResult\x12\x0f\n\x07message\x18\x03 \x01(\t\x12\x1a\n\x12\x66irst_event_number\x18\x04 \x02(\x03\x12\x19\n\x11last_event_number\x18\x05 \x02(\x03\x12\x18\n\x10prepare_position\x18\x06 \x01(\x03\x12\x17\n\x0f\x63ommit_position\x18\x07 \x01(\x03\"l\n\tReadEvent\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x14\n\x0c\x65vent_number\x18\x02 \x02(\x03\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x04 \x02(\x08\"\xa0\x02\n\x12ReadEventCompleted\x12N\n\x06result\x18\x01 \x02(\x0e\x32>.EventStore.Client.Messages.ReadEventCompleted.ReadEventResult\x12?\n\x05\x65vent\x18\x02 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\r\n\x05\x65rror\x18\x03 \x01(\t\"j\n\x0fReadEventResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NotFound\x10\x01\x12\x0c\n\x08NoStream\x10\x02\x12\x11\n\rStreamDeleted\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x8b\x01\n\x10ReadStreamEvents\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x19\n\x11\x66rom_event_number\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xa2\x03\n\x19ReadStreamEventsCompleted\x12@\n\x06\x65vents\x18\x01 \x03(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12V\n\x06result\x18\x02 \x02(\x0e\x32\x46.EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult\x12\x19\n\x11next_event_number\x18\x03 \x02(\x03\x12\x19\n\x11last_event_number\x18\x04 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x05 \x02(\x08\x12\x1c\n\x14last_commit_position\x18\x06 \x02(\x03\x12\r\n\x05\x65rror\x18\x07 \x01(\t\"n\n\x10ReadStreamResult\x12\x0b\n\x07Success\x10\x00\x12\x0c\n\x08NoStream\x10\x01\x12\x11\n\rStreamDeleted\x10\x02\x12\x0f\n\x0bNotModified\x10\x03\x12\t\n\x05\x45rror\x10\x04\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x05\"\x87\x01\n\rReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x18\n\x10resolve_link_tos\x18\x04 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x05 \x02(\x08\"\xf9\x02\n\x16ReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12Y\n\x06result\x18\x06 \x01(\x0e\x32@.EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x07 \x01(\t\"J\n\rReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xe9\x01\n\x06\x46ilter\x12\x41\n\x07\x63ontext\x18\x01 \x02(\x0e\x32\x30.EventStore.Client.Messages.Filter.FilterContext\x12;\n\x04type\x18\x02 
\x02(\x0e\x32-.EventStore.Client.Messages.Filter.FilterType\x12\x0c\n\x04\x64\x61ta\x18\x03 \x03(\t\",\n\rFilterContext\x12\x0c\n\x08StreamId\x10\x00\x12\r\n\tEventType\x10\x01\"#\n\nFilterType\x12\t\n\x05Regex\x10\x00\x12\n\n\x06Prefix\x10\x01\"\xde\x01\n\x15\x46ilteredReadAllEvents\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x11\n\tmax_count\x18\x03 \x02(\x05\x12\x19\n\x11max_search_window\x18\x04 \x01(\x05\x12\x18\n\x10resolve_link_tos\x18\x05 \x02(\x08\x12\x16\n\x0erequire_leader\x18\x06 \x02(\x08\x12\x32\n\x06\x66ilter\x18\x07 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\"\xb3\x03\n\x1e\x46ilteredReadAllEventsCompleted\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\x12\x39\n\x06\x65vents\x18\x03 \x03(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\x12\x1c\n\x14next_commit_position\x18\x04 \x02(\x03\x12\x1d\n\x15next_prepare_position\x18\x05 \x02(\x03\x12\x18\n\x10is_end_of_stream\x18\x06 \x02(\x08\x12i\n\x06result\x18\x07 \x01(\x0e\x32P.EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult:\x07Success\x12\r\n\x05\x65rror\x18\x08 \x01(\t\"R\n\x15\x46ilteredReadAllResult\x12\x0b\n\x07Success\x10\x00\x12\x0f\n\x0bNotModified\x10\x01\x12\t\n\x05\x45rror\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\xde\x03\n\x1c\x43reatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 \x01(\t\"X\n\x1c\x44\x65letePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\"\xde\x03\n\x1cUpdatePersistentSubscription\x12\x1f\n\x17subscription_group_name\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x03 \x02(\x08\x12\x12\n\nstart_from\x18\x04 \x02(\x03\x12$\n\x1cmessage_timeout_milliseconds\x18\x05 \x02(\x05\x12\x19\n\x11record_statistics\x18\x06 \x02(\x08\x12\x18\n\x10live_buffer_size\x18\x07 \x02(\x05\x12\x17\n\x0fread_batch_size\x18\x08 \x02(\x05\x12\x13\n\x0b\x62uffer_size\x18\t \x02(\x05\x12\x17\n\x0fmax_retry_count\x18\n \x02(\x05\x12\x1a\n\x12prefer_round_robin\x18\x0b \x02(\x08\x12\x1d\n\x15\x63heckpoint_after_time\x18\x0c \x02(\x05\x12\x1c\n\x14\x63heckpoint_max_count\x18\r \x02(\x05\x12\x1c\n\x14\x63heckpoint_min_count\x18\x0e \x02(\x05\x12\x1c\n\x14subscriber_max_count\x18\x0f \x02(\x05\x12\x1f\n\x17named_consumer_strategy\x18\x10 \x01(\t\"\x97\x02\n%UpdatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 
\x01(\t\"_\n\"UpdatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x98\x02\n%CreatePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"`\n\"CreatePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x11\n\rAlreadyExists\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"\x97\x02\n%DeletePersistentSubscriptionCompleted\x12}\n\x06result\x18\x01 \x02(\x0e\x32\x64.EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult:\x07Success\x12\x0e\n\x06reason\x18\x02 \x01(\t\"_\n\"DeletePersistentSubscriptionResult\x12\x0b\n\x07Success\x10\x00\x12\x10\n\x0c\x44oesNotExist\x10\x01\x12\x08\n\x04\x46\x61il\x10\x02\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x03\"w\n\x1f\x43onnectToPersistentSubscription\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x17\n\x0f\x65vent_stream_id\x18\x02 \x02(\t\x12\"\n\x1a\x61llowed_in_flight_messages\x18\x03 \x02(\x05\"W\n\x1fPersistentSubscriptionAckEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\"\x8b\x02\n\x1fPersistentSubscriptionNakEvents\x12\x17\n\x0fsubscription_id\x18\x01 \x02(\t\x12\x1b\n\x13processed_event_ids\x18\x02 \x03(\x0c\x12\x0f\n\x07message\x18\x03 \x01(\t\x12^\n\x06\x61\x63tion\x18\x04 \x02(\x0e\x32\x45.EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction:\x07Unknown\"A\n\tNakAction\x12\x0b\n\x07Unknown\x10\x00\x12\x08\n\x04Park\x10\x01\x12\t\n\x05Retry\x10\x02\x12\x08\n\x04Skip\x10\x03\x12\x08\n\x04Stop\x10\x04\"v\n\"PersistentSubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x17\n\x0fsubscription_id\x18\x02 \x02(\t\x12\x19\n\x11last_event_number\x18\x03 \x01(\x03\"\x80\x01\n)PersistentSubscriptionStreamEventAppeared\x12?\n\x05\x65vent\x18\x01 \x02(\x0b\x32\x30.EventStore.Client.Messages.ResolvedIndexedEvent\x12\x12\n\nretryCount\x18\x02 \x01(\x05\"F\n\x11SubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\"\x9f\x01\n\x19\x46ilteredSubscribeToStream\x12\x17\n\x0f\x65vent_stream_id\x18\x01 \x02(\t\x12\x18\n\x10resolve_link_tos\x18\x02 \x02(\x08\x12\x32\n\x06\x66ilter\x18\x03 \x02(\x0b\x32\".EventStore.Client.Messages.Filter\x12\x1b\n\x13\x63heckpoint_interval\x18\x04 \x02(\x05\"F\n\x11\x43heckpointReached\x12\x17\n\x0f\x63ommit_position\x18\x01 \x02(\x03\x12\x18\n\x10prepare_position\x18\x02 \x02(\x03\"S\n\x18SubscriptionConfirmation\x12\x1c\n\x14last_commit_position\x18\x01 \x02(\x03\x12\x19\n\x11last_event_number\x18\x02 \x01(\x03\"O\n\x13StreamEventAppeared\x12\x38\n\x05\x65vent\x18\x01 \x02(\x0b\x32).EventStore.Client.Messages.ResolvedEvent\"\x17\n\x15UnsubscribeFromStream\"\x8a\x02\n\x13SubscriptionDropped\x12\x64\n\x06reason\x18\x01 \x01(\x0e\x32\x46.EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason:\x0cUnsubscribed\"\x8c\x01\n\x16SubscriptionDropReason\x12\x10\n\x0cUnsubscribed\x10\x00\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x01\x12\x0c\n\x08NotFound\x10\x02\x12!\n\x1dPersistentSubscriptionDeleted\x10\x03\x12\x1d\n\x19SubscriberMaxCountReached\x10\x04\"\xf4\x02\n\nNotHandled\x12G\n\x06reason\x18\x01 
\x02(\x0e\x32\x37.EventStore.Client.Messages.NotHandled.NotHandledReason\x12\x17\n\x0f\x61\x64\x64itional_info\x18\x02 \x01(\x0c\x1a\xb5\x01\n\nLeaderInfo\x12\x1c\n\x14\x65xternal_tcp_address\x18\x01 \x02(\t\x12\x19\n\x11\x65xternal_tcp_port\x18\x02 \x02(\x05\x12\x14\n\x0chttp_address\x18\x03 \x02(\t\x12\x11\n\thttp_port\x18\x04 \x02(\x05\x12#\n\x1b\x65xternal_secure_tcp_address\x18\x05 \x01(\t\x12 \n\x18\x65xternal_secure_tcp_port\x18\x06 \x01(\x05\"L\n\x10NotHandledReason\x12\x0c\n\x08NotReady\x10\x00\x12\x0b\n\x07TooBusy\x10\x01\x12\r\n\tNotLeader\x10\x02\x12\x0e\n\nIsReadOnly\x10\x03\"\x12\n\x10ScavengeDatabase\"\xc4\x01\n\x18ScavengeDatabaseResponse\x12S\n\x06result\x18\x01 \x02(\x0e\x32\x43.EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult\x12\x12\n\nscavengeId\x18\x02 \x01(\t\"?\n\x0eScavengeResult\x12\x0b\n\x07Started\x10\x00\x12\x0e\n\nInProgress\x10\x01\x12\x10\n\x0cUnauthorized\x10\x02\":\n\x0eIdentifyClient\x12\x0f\n\x07version\x18\x01 \x02(\x05\x12\x17\n\x0f\x63onnection_name\x18\x02 \x01(\t\"\x12\n\x10\x43lientIdentified*\xb0\x01\n\x0fOperationResult\x12\x0b\n\x07Success\x10\x00\x12\x12\n\x0ePrepareTimeout\x10\x01\x12\x11\n\rCommitTimeout\x10\x02\x12\x12\n\x0e\x46orwardTimeout\x10\x03\x12\x18\n\x14WrongExpectedVersion\x10\x04\x12\x11\n\rStreamDeleted\x10\x05\x12\x16\n\x12InvalidTransaction\x10\x06\x12\x10\n\x0c\x41\x63\x63\x65ssDenied\x10\x07' ) _OPERATIONRESULT = _descriptor.EnumDescriptor( name='OperationResult', full_name='EventStore.Client.Messages.OperationResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PrepareTimeout', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CommitTimeout', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ForwardTimeout', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='WrongExpectedVersion', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='InvalidTransaction', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8630, serialized_end=8806, ) _sym_db.RegisterEnumDescriptor(_OPERATIONRESULT) OperationResult = enum_type_wrapper.EnumTypeWrapper(_OPERATIONRESULT) Success = 0 PrepareTimeout = 1 CommitTimeout = 2 ForwardTimeout = 3 WrongExpectedVersion = 4 StreamDeleted = 5 InvalidTransaction = 6 AccessDenied = 7 _READEVENTCOMPLETED_READEVENTRESULT = _descriptor.EnumDescriptor( name='ReadEventResult', full_name='EventStore.Client.Messages.ReadEventCompleted.ReadEventResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, 
serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotFound', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NoStream', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=2471, serialized_end=2577, ) _sym_db.RegisterEnumDescriptor(_READEVENTCOMPLETED_READEVENTRESULT) _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT = _descriptor.EnumDescriptor( name='ReadStreamResult', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.ReadStreamResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NoStream', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='StreamDeleted', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3030, serialized_end=3140, ) _sym_db.RegisterEnumDescriptor(_READSTREAMEVENTSCOMPLETED_READSTREAMRESULT) _READALLEVENTSCOMPLETED_READALLRESULT = _descriptor.EnumDescriptor( name='ReadAllResult', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.ReadAllResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3584, serialized_end=3658, ) _sym_db.RegisterEnumDescriptor(_READALLEVENTSCOMPLETED_READALLRESULT) _FILTER_FILTERCONTEXT = _descriptor.EnumDescriptor( name='FilterContext', full_name='EventStore.Client.Messages.Filter.FilterContext', filename=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='StreamId', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EventType', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3813, serialized_end=3857, ) _sym_db.RegisterEnumDescriptor(_FILTER_FILTERCONTEXT) _FILTER_FILTERTYPE = _descriptor.EnumDescriptor( name='FilterType', full_name='EventStore.Client.Messages.Filter.FilterType', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Regex', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Prefix', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=3859, serialized_end=3894, ) _sym_db.RegisterEnumDescriptor(_FILTER_FILTERTYPE) _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT = _descriptor.EnumDescriptor( name='FilteredReadAllResult', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.FilteredReadAllResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotModified', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Error', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=4475, serialized_end=4557, ) _sym_db.RegisterEnumDescriptor(_FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT) _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='UpdatePersistentSubscriptionResult', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.UpdatePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DoesNotExist', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=5796, serialized_end=5891, ) _sym_db.RegisterEnumDescriptor(_UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT) _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='CreatePersistentSubscriptionResult', 
full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.CreatePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AlreadyExists', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6078, serialized_end=6174, ) _sym_db.RegisterEnumDescriptor(_CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT) _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT = _descriptor.EnumDescriptor( name='DeletePersistentSubscriptionResult', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.DeletePersistentSubscriptionResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Success', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DoesNotExist', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Fail', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6361, serialized_end=6456, ) _sym_db.RegisterEnumDescriptor(_DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT) _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION = _descriptor.EnumDescriptor( name='NakAction', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.NakAction', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Unknown', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Park', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Retry', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Skip', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Stop', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=6871, serialized_end=6936, ) _sym_db.RegisterEnumDescriptor(_PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION) _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON = _descriptor.EnumDescriptor( name='SubscriptionDropReason', full_name='EventStore.Client.Messages.SubscriptionDropped.SubscriptionDropReason', filename=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Unsubscribed', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AccessDenied', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotFound', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PersistentSubscriptionDeleted', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SubscriberMaxCountReached', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=7813, serialized_end=7953, ) _sym_db.RegisterEnumDescriptor(_SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON) _NOTHANDLED_NOTHANDLEDREASON = _descriptor.EnumDescriptor( name='NotHandledReason', full_name='EventStore.Client.Messages.NotHandled.NotHandledReason', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='NotReady', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TooBusy', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NotLeader', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='IsReadOnly', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8252, serialized_end=8328, ) _sym_db.RegisterEnumDescriptor(_NOTHANDLED_NOTHANDLEDREASON) _SCAVENGEDATABASERESPONSE_SCAVENGERESULT = _descriptor.EnumDescriptor( name='ScavengeResult', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.ScavengeResult', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='Started', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='InProgress', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='Unauthorized', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=8484, serialized_end=8547, ) _sym_db.RegisterEnumDescriptor(_SCAVENGEDATABASERESPONSE_SCAVENGERESULT) _NEWEVENT = _descriptor.Descriptor( name='NewEvent', full_name='EventStore.Client.Messages.NewEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_id', full_name='EventStore.Client.Messages.NewEvent.event_id', index=0, number=1, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( 
name='event_type', full_name='EventStore.Client.Messages.NewEvent.event_type', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data_content_type', full_name='EventStore.Client.Messages.NewEvent.data_content_type', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata_content_type', full_name='EventStore.Client.Messages.NewEvent.metadata_content_type', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.NewEvent.data', index=4, number=5, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='EventStore.Client.Messages.NewEvent.metadata', index=5, number=6, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=56, serialized_end=194, ) _EVENTRECORD = _descriptor.Descriptor( name='EventRecord', full_name='EventStore.Client.Messages.EventRecord', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.EventRecord.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_number', full_name='EventStore.Client.Messages.EventRecord.event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_id', full_name='EventStore.Client.Messages.EventRecord.event_id', index=2, number=3, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='event_type', full_name='EventStore.Client.Messages.EventRecord.event_type', index=3, number=4, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data_content_type', full_name='EventStore.Client.Messages.EventRecord.data_content_type', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata_content_type', full_name='EventStore.Client.Messages.EventRecord.metadata_content_type', index=5, number=6, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.EventRecord.data', index=6, number=7, type=12, cpp_type=9, label=2, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='metadata', full_name='EventStore.Client.Messages.EventRecord.metadata', index=7, number=8, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created', full_name='EventStore.Client.Messages.EventRecord.created', index=8, number=9, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='created_epoch', full_name='EventStore.Client.Messages.EventRecord.created_epoch', index=9, number=10, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=197, serialized_end=425, ) _RESOLVEDINDEXEDEVENT = _descriptor.Descriptor( name='ResolvedIndexedEvent', full_name='EventStore.Client.Messages.ResolvedIndexedEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='link', full_name='EventStore.Client.Messages.ResolvedIndexedEvent.link', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=428, serialized_end=561, ) _RESOLVEDEVENT = _descriptor.Descriptor( name='ResolvedEvent', full_name='EventStore.Client.Messages.ResolvedEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ResolvedEvent.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='link', full_name='EventStore.Client.Messages.ResolvedEvent.link', index=1, number=2, type=11, cpp_type=10, label=1, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.ResolvedEvent.commit_position', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ResolvedEvent.prepare_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=564, serialized_end=741, ) _WRITEEVENTS = _descriptor.Descriptor( name='WriteEvents', full_name='EventStore.Client.Messages.WriteEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.WriteEvents.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.WriteEvents.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.WriteEvents.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.WriteEvents.require_leader', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=744, serialized_end=886, ) _WRITEEVENTSCOMPLETED = _descriptor.Descriptor( name='WriteEventsCompleted', full_name='EventStore.Client.Messages.WriteEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.WriteEventsCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.WriteEventsCompleted.message', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='first_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.first_event_number', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.WriteEventsCompleted.last_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.prepare_position', index=4, number=5, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.WriteEventsCompleted.commit_position', index=5, number=6, type=3, cpp_type=2, label=1, 
has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='current_version', full_name='EventStore.Client.Messages.WriteEventsCompleted.current_version', index=6, number=7, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=889, serialized_end=1120, ) _DELETESTREAM = _descriptor.Descriptor( name='DeleteStream', full_name='EventStore.Client.Messages.DeleteStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.DeleteStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.DeleteStream.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.DeleteStream.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='hard_delete', full_name='EventStore.Client.Messages.DeleteStream.hard_delete', index=3, number=4, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1122, serialized_end=1232, ) _DELETESTREAMCOMPLETED = _descriptor.Descriptor( name='DeleteStreamCompleted', full_name='EventStore.Client.Messages.DeleteStreamCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.DeleteStreamCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', 
full_name='EventStore.Client.Messages.DeleteStreamCompleted.message', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.prepare_position', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.DeleteStreamCompleted.commit_position', index=3, number=4, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1235, serialized_end=1387, ) _TRANSACTIONSTART = _descriptor.Descriptor( name='TransactionStart', full_name='EventStore.Client.Messages.TransactionStart', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.TransactionStart.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='expected_version', full_name='EventStore.Client.Messages.TransactionStart.expected_version', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionStart.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1389, serialized_end=1482, ) _TRANSACTIONSTARTCOMPLETED = _descriptor.Descriptor( name='TransactionStartCompleted', full_name='EventStore.Client.Messages.TransactionStartCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionStartCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.TransactionStartCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionStartCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1485, serialized_end=1614, ) _TRANSACTIONWRITE = _descriptor.Descriptor( name='TransactionWrite', full_name='EventStore.Client.Messages.TransactionWrite', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionWrite.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.TransactionWrite.events', index=1, number=2, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionWrite.require_leader', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1616, serialized_end=1736, ) _TRANSACTIONWRITECOMPLETED = _descriptor.Descriptor( name='TransactionWriteCompleted', full_name='EventStore.Client.Messages.TransactionWriteCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionWriteCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', 
full_name='EventStore.Client.Messages.TransactionWriteCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionWriteCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1739, serialized_end=1868, ) _TRANSACTIONCOMMIT = _descriptor.Descriptor( name='TransactionCommit', full_name='EventStore.Client.Messages.TransactionCommit', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommit.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.TransactionCommit.require_leader', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1870, serialized_end=1937, ) _TRANSACTIONCOMMITCOMPLETED = _descriptor.Descriptor( name='TransactionCommitCompleted', full_name='EventStore.Client.Messages.TransactionCommitCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='transaction_id', full_name='EventStore.Client.Messages.TransactionCommitCompleted.transaction_id', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.TransactionCommitCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.TransactionCommitCompleted.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='first_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.first_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.TransactionCommitCompleted.last_event_number', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.prepare_position', index=5, number=6, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.TransactionCommitCompleted.commit_position', index=6, number=7, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=1940, serialized_end=2176, ) _READEVENT = _descriptor.Descriptor( name='ReadEvent', full_name='EventStore.Client.Messages.ReadEvent', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ReadEvent.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_number', full_name='EventStore.Client.Messages.ReadEvent.event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadEvent.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadEvent.require_leader', index=3, number=4, type=8, 
cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2178, serialized_end=2286, ) _READEVENTCOMPLETED = _descriptor.Descriptor( name='ReadEventCompleted', full_name='EventStore.Client.Messages.ReadEventCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadEventCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.ReadEventCompleted.event', index=1, number=2, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadEventCompleted.error', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READEVENTCOMPLETED_READEVENTRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2289, serialized_end=2577, ) _READSTREAMEVENTS = _descriptor.Descriptor( name='ReadStreamEvents', full_name='EventStore.Client.Messages.ReadStreamEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ReadStreamEvents.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='from_event_number', full_name='EventStore.Client.Messages.ReadStreamEvents.from_event_number', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.ReadStreamEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadStreamEvents.resolve_link_tos', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadStreamEvents.require_leader', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2580, serialized_end=2719, ) _READSTREAMEVENTSCOMPLETED = _descriptor.Descriptor( name='ReadStreamEventsCompleted', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.events', index=0, number=1, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.result', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.next_event_number', index=2, number=3, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_event_number', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='is_end_of_stream', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.is_end_of_stream', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.last_commit_position', index=5, number=6, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadStreamEventsCompleted.error', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=2722, serialized_end=3140, ) _READALLEVENTS = _descriptor.Descriptor( name='ReadAllEvents', full_name='EventStore.Client.Messages.ReadAllEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.ReadAllEvents.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEvents.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.ReadAllEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.ReadAllEvents.resolve_link_tos', index=3, number=4, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.ReadAllEvents.require_leader', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3143, serialized_end=3278, ) _READALLEVENTSCOMPLETED = _descriptor.Descriptor( name='ReadAllEventsCompleted', full_name='EventStore.Client.Messages.ReadAllEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( 
name='commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_commit_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_commit_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_prepare_position', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.next_prepare_position', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.result', index=5, number=6, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.ReadAllEventsCompleted.error', index=6, number=7, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _READALLEVENTSCOMPLETED_READALLRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3281, serialized_end=3658, ) _FILTER = _descriptor.Descriptor( name='Filter', full_name='EventStore.Client.Messages.Filter', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='context', full_name='EventStore.Client.Messages.Filter.context', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='type', full_name='EventStore.Client.Messages.Filter.type', index=1, number=2, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='data', full_name='EventStore.Client.Messages.Filter.data', index=2, number=3, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _FILTER_FILTERCONTEXT, _FILTER_FILTERTYPE, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3661, serialized_end=3894, ) _FILTEREDREADALLEVENTS = _descriptor.Descriptor( name='FilteredReadAllEvents', full_name='EventStore.Client.Messages.FilteredReadAllEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEvents.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_count', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_count', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_search_window', full_name='EventStore.Client.Messages.FilteredReadAllEvents.max_search_window', index=3, number=4, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.FilteredReadAllEvents.resolve_link_tos', index=4, number=5, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='require_leader', full_name='EventStore.Client.Messages.FilteredReadAllEvents.require_leader', index=5, number=6, type=8, cpp_type=7, label=2, 
has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='filter', full_name='EventStore.Client.Messages.FilteredReadAllEvents.filter', index=6, number=7, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=3897, serialized_end=4119, ) _FILTEREDREADALLEVENTSCOMPLETED = _descriptor.Descriptor( name='FilteredReadAllEventsCompleted', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='events', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.events', index=2, number=3, type=11, cpp_type=10, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_commit_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_commit_position', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='next_prepare_position', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.next_prepare_position', index=4, number=5, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='is_end_of_stream', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.is_end_of_stream', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, 
file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.result', index=6, number=7, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='error', full_name='EventStore.Client.Messages.FilteredReadAllEventsCompleted.error', index=7, number=8, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4122, serialized_end=4557, ) _CREATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='CreatePersistentSubscription', full_name='EventStore.Client.Messages.CreatePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.CreatePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.CreatePersistentSubscription.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='start_from', full_name='EventStore.Client.Messages.CreatePersistentSubscription.start_from', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.CreatePersistentSubscription.message_timeout_milliseconds', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='record_statistics', full_name='EventStore.Client.Messages.CreatePersistentSubscription.record_statistics', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='live_buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.live_buffer_size', index=6, number=7, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='read_batch_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.read_batch_size', index=7, number=8, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='buffer_size', full_name='EventStore.Client.Messages.CreatePersistentSubscription.buffer_size', index=8, number=9, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_retry_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.max_retry_count', index=9, number=10, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prefer_round_robin', full_name='EventStore.Client.Messages.CreatePersistentSubscription.prefer_round_robin', index=10, number=11, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_after_time', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_after_time', index=11, number=12, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_max_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_max_count', index=12, number=13, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_min_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.checkpoint_min_count', index=13, number=14, type=5, cpp_type=1, 
label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscriber_max_count', full_name='EventStore.Client.Messages.CreatePersistentSubscription.subscriber_max_count', index=14, number=15, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='named_consumer_strategy', full_name='EventStore.Client.Messages.CreatePersistentSubscription.named_consumer_strategy', index=15, number=16, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=4560, serialized_end=5038, ) _DELETEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='DeletePersistentSubscription', full_name='EventStore.Client.Messages.DeletePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', full_name='EventStore.Client.Messages.DeletePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.DeletePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5040, serialized_end=5128, ) _UPDATEPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( name='UpdatePersistentSubscription', full_name='EventStore.Client.Messages.UpdatePersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_group_name', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscription_group_name', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, 
label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.resolve_link_tos', index=2, number=3, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='start_from', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.start_from', index=3, number=4, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message_timeout_milliseconds', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.message_timeout_milliseconds', index=4, number=5, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='record_statistics', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.record_statistics', index=5, number=6, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='live_buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.live_buffer_size', index=6, number=7, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='read_batch_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.read_batch_size', index=7, number=8, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='buffer_size', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.buffer_size', index=8, number=9, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='max_retry_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.max_retry_count', index=9, number=10, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, 
create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prefer_round_robin', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.prefer_round_robin', index=10, number=11, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_after_time', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_after_time', index=11, number=12, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_max_count', index=12, number=13, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_min_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.checkpoint_min_count', index=13, number=14, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscriber_max_count', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.subscriber_max_count', index=14, number=15, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='named_consumer_strategy', full_name='EventStore.Client.Messages.UpdatePersistentSubscription.named_consumer_strategy', index=15, number=16, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5131, serialized_end=5609, ) _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='UpdatePersistentSubscriptionCompleted', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), 
_descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.UpdatePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5612, serialized_end=5891, ) _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='CreatePersistentSubscriptionCompleted', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.CreatePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=5894, serialized_end=6174, ) _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED = _descriptor.Descriptor( name='DeletePersistentSubscriptionCompleted', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.DeletePersistentSubscriptionCompleted.reason', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6177, serialized_end=6456, ) _CONNECTTOPERSISTENTSUBSCRIPTION = _descriptor.Descriptor( 
name='ConnectToPersistentSubscription', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.event_stream_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='allowed_in_flight_messages', full_name='EventStore.Client.Messages.ConnectToPersistentSubscription.allowed_in_flight_messages', index=2, number=3, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6458, serialized_end=6577, ) _PERSISTENTSUBSCRIPTIONACKEVENTS = _descriptor.Descriptor( name='PersistentSubscriptionAckEvents', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionAckEvents.processed_event_ids', index=1, number=2, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6579, serialized_end=6666, ) _PERSISTENTSUBSCRIPTIONNAKEVENTS = _descriptor.Descriptor( name='PersistentSubscriptionNakEvents', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.subscription_id', index=0, number=1, type=9, cpp_type=9, label=2, 
has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='processed_event_ids', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.processed_event_ids', index=1, number=2, type=12, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='message', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.message', index=2, number=3, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='action', full_name='EventStore.Client.Messages.PersistentSubscriptionNakEvents.action', index=3, number=4, type=14, cpp_type=8, label=2, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6669, serialized_end=6936, ) _PERSISTENTSUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor( name='PersistentSubscriptionConfirmation', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='subscription_id', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.subscription_id', index=1, number=2, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.PersistentSubscriptionConfirmation.last_event_number', index=2, number=3, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=6938, serialized_end=7056, ) _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED = 
_descriptor.Descriptor( name='PersistentSubscriptionStreamEventAppeared', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='retryCount', full_name='EventStore.Client.Messages.PersistentSubscriptionStreamEventAppeared.retryCount', index=1, number=2, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7059, serialized_end=7187, ) _SUBSCRIBETOSTREAM = _descriptor.Descriptor( name='SubscribeToStream', full_name='EventStore.Client.Messages.SubscribeToStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.SubscribeToStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.SubscribeToStream.resolve_link_tos', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7189, serialized_end=7259, ) _FILTEREDSUBSCRIBETOSTREAM = _descriptor.Descriptor( name='FilteredSubscribeToStream', full_name='EventStore.Client.Messages.FilteredSubscribeToStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event_stream_id', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.event_stream_id', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='resolve_link_tos', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.resolve_link_tos', index=1, number=2, type=8, cpp_type=7, label=2, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='filter', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.filter', index=2, number=3, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='checkpoint_interval', full_name='EventStore.Client.Messages.FilteredSubscribeToStream.checkpoint_interval', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7262, serialized_end=7421, ) _CHECKPOINTREACHED = _descriptor.Descriptor( name='CheckpointReached', full_name='EventStore.Client.Messages.CheckpointReached', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='commit_position', full_name='EventStore.Client.Messages.CheckpointReached.commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='prepare_position', full_name='EventStore.Client.Messages.CheckpointReached.prepare_position', index=1, number=2, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7423, serialized_end=7493, ) _SUBSCRIPTIONCONFIRMATION = _descriptor.Descriptor( name='SubscriptionConfirmation', full_name='EventStore.Client.Messages.SubscriptionConfirmation', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='last_commit_position', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_commit_position', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='last_event_number', full_name='EventStore.Client.Messages.SubscriptionConfirmation.last_event_number', index=1, number=2, type=3, cpp_type=2, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, 
is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7495, serialized_end=7578, ) _STREAMEVENTAPPEARED = _descriptor.Descriptor( name='StreamEventAppeared', full_name='EventStore.Client.Messages.StreamEventAppeared', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='event', full_name='EventStore.Client.Messages.StreamEventAppeared.event', index=0, number=1, type=11, cpp_type=10, label=2, has_default_value=False, default_value=None, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7580, serialized_end=7659, ) _UNSUBSCRIBEFROMSTREAM = _descriptor.Descriptor( name='UnsubscribeFromStream', full_name='EventStore.Client.Messages.UnsubscribeFromStream', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7661, serialized_end=7684, ) _SUBSCRIPTIONDROPPED = _descriptor.Descriptor( name='SubscriptionDropped', full_name='EventStore.Client.Messages.SubscriptionDropped', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.SubscriptionDropped.reason', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=True, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7687, serialized_end=7953, ) _NOTHANDLED_LEADERINFO = _descriptor.Descriptor( name='LeaderInfo', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='external_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_address', index=0, number=1, type=9, cpp_type=9, label=2, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_tcp_port', index=1, number=2, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='http_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_address', index=2, number=3, type=9, cpp_type=9, label=2, has_default_value=False, 
default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='http_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.http_port', index=3, number=4, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_secure_tcp_address', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_address', index=4, number=5, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='external_secure_tcp_port', full_name='EventStore.Client.Messages.NotHandled.LeaderInfo.external_secure_tcp_port', index=5, number=6, type=5, cpp_type=1, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8069, serialized_end=8250, ) _NOTHANDLED = _descriptor.Descriptor( name='NotHandled', full_name='EventStore.Client.Messages.NotHandled', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='reason', full_name='EventStore.Client.Messages.NotHandled.reason', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='additional_info', full_name='EventStore.Client.Messages.NotHandled.additional_info', index=1, number=2, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[_NOTHANDLED_LEADERINFO, ], enum_types=[ _NOTHANDLED_NOTHANDLEDREASON, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=7956, serialized_end=8328, ) _SCAVENGEDATABASE = _descriptor.Descriptor( name='ScavengeDatabase', full_name='EventStore.Client.Messages.ScavengeDatabase', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8330, serialized_end=8348, ) _SCAVENGEDATABASERESPONSE = _descriptor.Descriptor( name='ScavengeDatabaseResponse', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse', filename=None, file=DESCRIPTOR, 
containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='result', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.result', index=0, number=1, type=14, cpp_type=8, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='scavengeId', full_name='EventStore.Client.Messages.ScavengeDatabaseResponse.scavengeId', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ _SCAVENGEDATABASERESPONSE_SCAVENGERESULT, ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8351, serialized_end=8547, ) _IDENTIFYCLIENT = _descriptor.Descriptor( name='IdentifyClient', full_name='EventStore.Client.Messages.IdentifyClient', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='version', full_name='EventStore.Client.Messages.IdentifyClient.version', index=0, number=1, type=5, cpp_type=1, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), _descriptor.FieldDescriptor( name='connection_name', full_name='EventStore.Client.Messages.IdentifyClient.connection_name', index=1, number=2, type=9, cpp_type=9, label=1, has_default_value=False, default_value=b"".decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8549, serialized_end=8607, ) _CLIENTIDENTIFIED = _descriptor.Descriptor( name='ClientIdentified', full_name='EventStore.Client.Messages.ClientIdentified', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=8609, serialized_end=8627, ) _RESOLVEDINDEXEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD _RESOLVEDINDEXEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD _RESOLVEDEVENT.fields_by_name['event'].message_type = _EVENTRECORD _RESOLVEDEVENT.fields_by_name['link'].message_type = _EVENTRECORD _WRITEEVENTS.fields_by_name['events'].message_type = _NEWEVENT _WRITEEVENTSCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _DELETESTREAMCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _TRANSACTIONSTARTCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _TRANSACTIONWRITE.fields_by_name['events'].message_type = _NEWEVENT _TRANSACTIONWRITECOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT 
_TRANSACTIONCOMMITCOMPLETED.fields_by_name['result'].enum_type = _OPERATIONRESULT _READEVENTCOMPLETED.fields_by_name['result'].enum_type = _READEVENTCOMPLETED_READEVENTRESULT _READEVENTCOMPLETED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT _READEVENTCOMPLETED_READEVENTRESULT.containing_type = _READEVENTCOMPLETED _READSTREAMEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDINDEXEDEVENT _READSTREAMEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT _READSTREAMEVENTSCOMPLETED_READSTREAMRESULT.containing_type = _READSTREAMEVENTSCOMPLETED _READALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT _READALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _READALLEVENTSCOMPLETED_READALLRESULT _READALLEVENTSCOMPLETED_READALLRESULT.containing_type = _READALLEVENTSCOMPLETED _FILTER.fields_by_name['context'].enum_type = _FILTER_FILTERCONTEXT _FILTER.fields_by_name['type'].enum_type = _FILTER_FILTERTYPE _FILTER_FILTERCONTEXT.containing_type = _FILTER _FILTER_FILTERTYPE.containing_type = _FILTER _FILTEREDREADALLEVENTS.fields_by_name['filter'].message_type = _FILTER _FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['events'].message_type = _RESOLVEDEVENT _FILTEREDREADALLEVENTSCOMPLETED.fields_by_name['result'].enum_type = _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT _FILTEREDREADALLEVENTSCOMPLETED_FILTEREDREADALLRESULT.containing_type = _FILTEREDREADALLEVENTSCOMPLETED _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED_UPDATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED_CREATEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED.fields_by_name['result'].enum_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED_DELETEPERSISTENTSUBSCRIPTIONRESULT.containing_type = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED _PERSISTENTSUBSCRIPTIONNAKEVENTS.fields_by_name['action'].enum_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION _PERSISTENTSUBSCRIPTIONNAKEVENTS_NAKACTION.containing_type = _PERSISTENTSUBSCRIPTIONNAKEVENTS _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDINDEXEDEVENT _FILTEREDSUBSCRIBETOSTREAM.fields_by_name['filter'].message_type = _FILTER _STREAMEVENTAPPEARED.fields_by_name['event'].message_type = _RESOLVEDEVENT _SUBSCRIPTIONDROPPED.fields_by_name['reason'].enum_type = _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON _SUBSCRIPTIONDROPPED_SUBSCRIPTIONDROPREASON.containing_type = _SUBSCRIPTIONDROPPED _NOTHANDLED_LEADERINFO.containing_type = _NOTHANDLED _NOTHANDLED.fields_by_name['reason'].enum_type = _NOTHANDLED_NOTHANDLEDREASON _NOTHANDLED_NOTHANDLEDREASON.containing_type = _NOTHANDLED _SCAVENGEDATABASERESPONSE.fields_by_name['result'].enum_type = _SCAVENGEDATABASERESPONSE_SCAVENGERESULT _SCAVENGEDATABASERESPONSE_SCAVENGERESULT.containing_type = _SCAVENGEDATABASERESPONSE DESCRIPTOR.message_types_by_name['NewEvent'] = _NEWEVENT DESCRIPTOR.message_types_by_name['EventRecord'] = _EVENTRECORD DESCRIPTOR.message_types_by_name['ResolvedIndexedEvent'] = _RESOLVEDINDEXEDEVENT 
DESCRIPTOR.message_types_by_name['ResolvedEvent'] = _RESOLVEDEVENT DESCRIPTOR.message_types_by_name['WriteEvents'] = _WRITEEVENTS DESCRIPTOR.message_types_by_name['WriteEventsCompleted'] = _WRITEEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['DeleteStream'] = _DELETESTREAM DESCRIPTOR.message_types_by_name['DeleteStreamCompleted'] = _DELETESTREAMCOMPLETED DESCRIPTOR.message_types_by_name['TransactionStart'] = _TRANSACTIONSTART DESCRIPTOR.message_types_by_name['TransactionStartCompleted'] = _TRANSACTIONSTARTCOMPLETED DESCRIPTOR.message_types_by_name['TransactionWrite'] = _TRANSACTIONWRITE DESCRIPTOR.message_types_by_name['TransactionWriteCompleted'] = _TRANSACTIONWRITECOMPLETED DESCRIPTOR.message_types_by_name['TransactionCommit'] = _TRANSACTIONCOMMIT DESCRIPTOR.message_types_by_name['TransactionCommitCompleted'] = _TRANSACTIONCOMMITCOMPLETED DESCRIPTOR.message_types_by_name['ReadEvent'] = _READEVENT DESCRIPTOR.message_types_by_name['ReadEventCompleted'] = _READEVENTCOMPLETED DESCRIPTOR.message_types_by_name['ReadStreamEvents'] = _READSTREAMEVENTS DESCRIPTOR.message_types_by_name['ReadStreamEventsCompleted'] = _READSTREAMEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['ReadAllEvents'] = _READALLEVENTS DESCRIPTOR.message_types_by_name['ReadAllEventsCompleted'] = _READALLEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['Filter'] = _FILTER DESCRIPTOR.message_types_by_name['FilteredReadAllEvents'] = _FILTEREDREADALLEVENTS DESCRIPTOR.message_types_by_name['FilteredReadAllEventsCompleted'] = _FILTEREDREADALLEVENTSCOMPLETED DESCRIPTOR.message_types_by_name['CreatePersistentSubscription'] = _CREATEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['DeletePersistentSubscription'] = _DELETEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['UpdatePersistentSubscription'] = _UPDATEPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['UpdatePersistentSubscriptionCompleted'] = _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['CreatePersistentSubscriptionCompleted'] = _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['DeletePersistentSubscriptionCompleted'] = _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED DESCRIPTOR.message_types_by_name['ConnectToPersistentSubscription'] = _CONNECTTOPERSISTENTSUBSCRIPTION DESCRIPTOR.message_types_by_name['PersistentSubscriptionAckEvents'] = _PERSISTENTSUBSCRIPTIONACKEVENTS DESCRIPTOR.message_types_by_name['PersistentSubscriptionNakEvents'] = _PERSISTENTSUBSCRIPTIONNAKEVENTS DESCRIPTOR.message_types_by_name['PersistentSubscriptionConfirmation'] = _PERSISTENTSUBSCRIPTIONCONFIRMATION DESCRIPTOR.message_types_by_name['PersistentSubscriptionStreamEventAppeared'] = _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED DESCRIPTOR.message_types_by_name['SubscribeToStream'] = _SUBSCRIBETOSTREAM DESCRIPTOR.message_types_by_name['FilteredSubscribeToStream'] = _FILTEREDSUBSCRIBETOSTREAM DESCRIPTOR.message_types_by_name['CheckpointReached'] = _CHECKPOINTREACHED DESCRIPTOR.message_types_by_name['SubscriptionConfirmation'] = _SUBSCRIPTIONCONFIRMATION DESCRIPTOR.message_types_by_name['StreamEventAppeared'] = _STREAMEVENTAPPEARED DESCRIPTOR.message_types_by_name['UnsubscribeFromStream'] = _UNSUBSCRIBEFROMSTREAM DESCRIPTOR.message_types_by_name['SubscriptionDropped'] = _SUBSCRIPTIONDROPPED DESCRIPTOR.message_types_by_name['NotHandled'] = _NOTHANDLED DESCRIPTOR.message_types_by_name['ScavengeDatabase'] = _SCAVENGEDATABASE DESCRIPTOR.message_types_by_name['ScavengeDatabaseResponse'] = _SCAVENGEDATABASERESPONSE 
DESCRIPTOR.message_types_by_name['IdentifyClient'] = _IDENTIFYCLIENT DESCRIPTOR.message_types_by_name['ClientIdentified'] = _CLIENTIDENTIFIED DESCRIPTOR.enum_types_by_name['OperationResult'] = _OPERATIONRESULT _sym_db.RegisterFileDescriptor(DESCRIPTOR) NewEvent = _reflection.GeneratedProtocolMessageType('NewEvent', (_message.Message,), { 'DESCRIPTOR' : _NEWEVENT, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(NewEvent) EventRecord = _reflection.GeneratedProtocolMessageType('EventRecord', (_message.Message,), { 'DESCRIPTOR' : _EVENTRECORD, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(EventRecord) ResolvedIndexedEvent = _reflection.GeneratedProtocolMessageType('ResolvedIndexedEvent', (_message.Message,), { 'DESCRIPTOR' : _RESOLVEDINDEXEDEVENT, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ResolvedIndexedEvent) ResolvedEvent = _reflection.GeneratedProtocolMessageType('ResolvedEvent', (_message.Message,), { 'DESCRIPTOR' : _RESOLVEDEVENT, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ResolvedEvent) WriteEvents = _reflection.GeneratedProtocolMessageType('WriteEvents', (_message.Message,), { 'DESCRIPTOR' : _WRITEEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(WriteEvents) WriteEventsCompleted = _reflection.GeneratedProtocolMessageType('WriteEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _WRITEEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(WriteEventsCompleted) DeleteStream = _reflection.GeneratedProtocolMessageType('DeleteStream', (_message.Message,), { 'DESCRIPTOR' : _DELETESTREAM, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(DeleteStream) DeleteStreamCompleted = _reflection.GeneratedProtocolMessageType('DeleteStreamCompleted', (_message.Message,), { 'DESCRIPTOR' : _DELETESTREAMCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(DeleteStreamCompleted) TransactionStart = _reflection.GeneratedProtocolMessageType('TransactionStart', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONSTART, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionStart) TransactionStartCompleted = _reflection.GeneratedProtocolMessageType('TransactionStartCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONSTARTCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionStartCompleted) TransactionWrite = _reflection.GeneratedProtocolMessageType('TransactionWrite', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONWRITE, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionWrite) TransactionWriteCompleted = _reflection.GeneratedProtocolMessageType('TransactionWriteCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONWRITECOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionWriteCompleted) TransactionCommit = _reflection.GeneratedProtocolMessageType('TransactionCommit', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONCOMMIT, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionCommit) TransactionCommitCompleted = _reflection.GeneratedProtocolMessageType('TransactionCommitCompleted', (_message.Message,), { 'DESCRIPTOR' : _TRANSACTIONCOMMITCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(TransactionCommitCompleted) ReadEvent = _reflection.GeneratedProtocolMessageType('ReadEvent', (_message.Message,), { 'DESCRIPTOR' : _READEVENT, '__module__' : 
'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadEvent) ReadEventCompleted = _reflection.GeneratedProtocolMessageType('ReadEventCompleted', (_message.Message,), { 'DESCRIPTOR' : _READEVENTCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadEventCompleted) ReadStreamEvents = _reflection.GeneratedProtocolMessageType('ReadStreamEvents', (_message.Message,), { 'DESCRIPTOR' : _READSTREAMEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadStreamEvents) ReadStreamEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadStreamEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _READSTREAMEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadStreamEventsCompleted) ReadAllEvents = _reflection.GeneratedProtocolMessageType('ReadAllEvents', (_message.Message,), { 'DESCRIPTOR' : _READALLEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadAllEvents) ReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('ReadAllEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _READALLEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ReadAllEventsCompleted) Filter = _reflection.GeneratedProtocolMessageType('Filter', (_message.Message,), { 'DESCRIPTOR' : _FILTER, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(Filter) FilteredReadAllEvents = _reflection.GeneratedProtocolMessageType('FilteredReadAllEvents', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDREADALLEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(FilteredReadAllEvents) FilteredReadAllEventsCompleted = _reflection.GeneratedProtocolMessageType('FilteredReadAllEventsCompleted', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDREADALLEVENTSCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(FilteredReadAllEventsCompleted) CreatePersistentSubscription = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(CreatePersistentSubscription) DeletePersistentSubscription = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(DeletePersistentSubscription) UpdatePersistentSubscription = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(UpdatePersistentSubscription) UpdatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('UpdatePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _UPDATEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(UpdatePersistentSubscriptionCompleted) CreatePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('CreatePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _CREATEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(CreatePersistentSubscriptionCompleted) DeletePersistentSubscriptionCompleted = _reflection.GeneratedProtocolMessageType('DeletePersistentSubscriptionCompleted', (_message.Message,), { 'DESCRIPTOR' : _DELETEPERSISTENTSUBSCRIPTIONCOMPLETED, '__module__' : 
'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(DeletePersistentSubscriptionCompleted) ConnectToPersistentSubscription = _reflection.GeneratedProtocolMessageType('ConnectToPersistentSubscription', (_message.Message,), { 'DESCRIPTOR' : _CONNECTTOPERSISTENTSUBSCRIPTION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ConnectToPersistentSubscription) PersistentSubscriptionAckEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionAckEvents', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONACKEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(PersistentSubscriptionAckEvents) PersistentSubscriptionNakEvents = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionNakEvents', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONNAKEVENTS, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(PersistentSubscriptionNakEvents) PersistentSubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionConfirmation', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONCONFIRMATION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(PersistentSubscriptionConfirmation) PersistentSubscriptionStreamEventAppeared = _reflection.GeneratedProtocolMessageType('PersistentSubscriptionStreamEventAppeared', (_message.Message,), { 'DESCRIPTOR' : _PERSISTENTSUBSCRIPTIONSTREAMEVENTAPPEARED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(PersistentSubscriptionStreamEventAppeared) SubscribeToStream = _reflection.GeneratedProtocolMessageType('SubscribeToStream', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIBETOSTREAM, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(SubscribeToStream) FilteredSubscribeToStream = _reflection.GeneratedProtocolMessageType('FilteredSubscribeToStream', (_message.Message,), { 'DESCRIPTOR' : _FILTEREDSUBSCRIBETOSTREAM, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(FilteredSubscribeToStream) CheckpointReached = _reflection.GeneratedProtocolMessageType('CheckpointReached', (_message.Message,), { 'DESCRIPTOR' : _CHECKPOINTREACHED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(CheckpointReached) SubscriptionConfirmation = _reflection.GeneratedProtocolMessageType('SubscriptionConfirmation', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIPTIONCONFIRMATION, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(SubscriptionConfirmation) StreamEventAppeared = _reflection.GeneratedProtocolMessageType('StreamEventAppeared', (_message.Message,), { 'DESCRIPTOR' : _STREAMEVENTAPPEARED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(StreamEventAppeared) UnsubscribeFromStream = _reflection.GeneratedProtocolMessageType('UnsubscribeFromStream', (_message.Message,), { 'DESCRIPTOR' : _UNSUBSCRIBEFROMSTREAM, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(UnsubscribeFromStream) SubscriptionDropped = _reflection.GeneratedProtocolMessageType('SubscriptionDropped', (_message.Message,), { 'DESCRIPTOR' : _SUBSCRIPTIONDROPPED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(SubscriptionDropped) NotHandled = _reflection.GeneratedProtocolMessageType('NotHandled', (_message.Message,), { 'LeaderInfo' : _reflection.GeneratedProtocolMessageType('LeaderInfo', (_message.Message,), { 'DESCRIPTOR' : _NOTHANDLED_LEADERINFO, '__module__' : 'ClientMessageDtos_pb2' }) , 'DESCRIPTOR' : _NOTHANDLED, '__module__' : 'ClientMessageDtos_pb2' }) 
_sym_db.RegisterMessage(NotHandled) _sym_db.RegisterMessage(NotHandled.LeaderInfo) ScavengeDatabase = _reflection.GeneratedProtocolMessageType('ScavengeDatabase', (_message.Message,), { 'DESCRIPTOR' : _SCAVENGEDATABASE, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ScavengeDatabase) ScavengeDatabaseResponse = _reflection.GeneratedProtocolMessageType('ScavengeDatabaseResponse', (_message.Message,), { 'DESCRIPTOR' : _SCAVENGEDATABASERESPONSE, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ScavengeDatabaseResponse) IdentifyClient = _reflection.GeneratedProtocolMessageType('IdentifyClient', (_message.Message,), { 'DESCRIPTOR' : _IDENTIFYCLIENT, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(IdentifyClient) ClientIdentified = _reflection.GeneratedProtocolMessageType('ClientIdentified', (_message.Message,), { 'DESCRIPTOR' : _CLIENTIDENTIFIED, '__module__' : 'ClientMessageDtos_pb2' }) _sym_db.RegisterMessage(ClientIdentified)
true
true
f70114a063899ff6cb8eac32ecc128bfe7a8b6c9
1,626
py
Python
mmdet3d/datasets/pipelines/__init__.py
Bachmvp/mmdetection3d
b5b1a15a885eee92749e60a5837e2ce4918119f8
[ "Apache-2.0" ]
10
2021-09-13T13:07:37.000Z
2022-03-15T06:46:30.000Z
mmdet3d/datasets/pipelines/__init__.py
Bachmvp/mmdetection3d
b5b1a15a885eee92749e60a5837e2ce4918119f8
[ "Apache-2.0" ]
1
2021-11-10T07:14:32.000Z
2021-11-10T07:14:32.000Z
mmdet3d/datasets/pipelines/__init__.py
Bachmvp/mmdetection3d
b5b1a15a885eee92749e60a5837e2ce4918119f8
[ "Apache-2.0" ]
1
2021-09-01T08:27:24.000Z
2021-09-01T08:27:24.000Z
from mmdet.datasets.pipelines import Compose
from .dbsampler import DataBaseSampler
from .formating import Collect3D, DefaultFormatBundle, DefaultFormatBundle3D
from .loading import (LoadAnnotations3D, LoadImageFromFileMono3D,
                      LoadMultiViewImageFromFiles, LoadPointsFromFile,
                      LoadPointsFromMultiSweeps, NormalizePointsColor,
                      PointSegClassMapping)
from .test_time_aug import MultiScaleFlipAug3D
from .transforms_3d import (BackgroundPointsFilter, GlobalAlignment,
                            GlobalRotScaleTrans, IndoorPatchPointSample,
                            IndoorPointSample, ObjectNameFilter, ObjectNoise,
                            ObjectRangeFilter, ObjectSample, PointSample,
                            PointShuffle, PointsRangeFilter,
                            RandomDropPointsColor, RandomFlip3D,
                            RandomJitterPoints, VoxelBasedPointSampler)

__all__ = [
    'ObjectSample', 'RandomFlip3D', 'ObjectNoise', 'GlobalRotScaleTrans',
    'PointShuffle', 'ObjectRangeFilter', 'PointsRangeFilter', 'Collect3D',
    'Compose', 'LoadMultiViewImageFromFiles', 'LoadPointsFromFile',
    'DefaultFormatBundle', 'DefaultFormatBundle3D', 'DataBaseSampler',
    'NormalizePointsColor', 'LoadAnnotations3D', 'IndoorPointSample',
    'PointSample', 'PointSegClassMapping', 'MultiScaleFlipAug3D',
    'LoadPointsFromMultiSweeps', 'BackgroundPointsFilter',
    'VoxelBasedPointSampler', 'GlobalAlignment', 'IndoorPatchPointSample',
    'LoadImageFromFileMono3D', 'ObjectNameFilter', 'RandomDropPointsColor',
    'RandomJitterPoints'
]
56.068966
77
0.706027
from mmdet.datasets.pipelines import Compose
from .dbsampler import DataBaseSampler
from .formating import Collect3D, DefaultFormatBundle, DefaultFormatBundle3D
from .loading import (LoadAnnotations3D, LoadImageFromFileMono3D,
                      LoadMultiViewImageFromFiles, LoadPointsFromFile,
                      LoadPointsFromMultiSweeps, NormalizePointsColor,
                      PointSegClassMapping)
from .test_time_aug import MultiScaleFlipAug3D
from .transforms_3d import (BackgroundPointsFilter, GlobalAlignment,
                            GlobalRotScaleTrans, IndoorPatchPointSample,
                            IndoorPointSample, ObjectNameFilter, ObjectNoise,
                            ObjectRangeFilter, ObjectSample, PointSample,
                            PointShuffle, PointsRangeFilter,
                            RandomDropPointsColor, RandomFlip3D,
                            RandomJitterPoints, VoxelBasedPointSampler)

__all__ = [
    'ObjectSample', 'RandomFlip3D', 'ObjectNoise', 'GlobalRotScaleTrans',
    'PointShuffle', 'ObjectRangeFilter', 'PointsRangeFilter', 'Collect3D',
    'Compose', 'LoadMultiViewImageFromFiles', 'LoadPointsFromFile',
    'DefaultFormatBundle', 'DefaultFormatBundle3D', 'DataBaseSampler',
    'NormalizePointsColor', 'LoadAnnotations3D', 'IndoorPointSample',
    'PointSample', 'PointSegClassMapping', 'MultiScaleFlipAug3D',
    'LoadPointsFromMultiSweeps', 'BackgroundPointsFilter',
    'VoxelBasedPointSampler', 'GlobalAlignment', 'IndoorPatchPointSample',
    'LoadImageFromFileMono3D', 'ObjectNameFilter', 'RandomDropPointsColor',
    'RandomJitterPoints'
]
true
true
f70116b8f8fe8a44b68b426b5ed9ac2fcd18afd4
3,784
py
Python
tests/unit/test_views.py
awhileback/wagtail-personalisation
b84b5f9f9e29b8c6efa60b4002af8d8529565a5e
[ "MIT" ]
68
2018-01-26T22:02:09.000Z
2022-03-23T08:08:54.000Z
tests/unit/test_views.py
awhileback/wagtail-personalisation
b84b5f9f9e29b8c6efa60b4002af8d8529565a5e
[ "MIT" ]
46
2018-05-26T09:26:30.000Z
2022-02-04T15:17:45.000Z
tests/unit/test_views.py
awhileback/wagtail-personalisation
b84b5f9f9e29b8c6efa60b4002af8d8529565a5e
[ "MIT" ]
27
2018-03-28T10:14:26.000Z
2022-02-08T20:54:00.000Z
import pytest
from django.core.exceptions import PermissionDenied
from django.urls import reverse
from wagtail.core.models import Page

from wagtail_personalisation.models import Segment
from wagtail_personalisation.rules import VisitCountRule
from wagtail_personalisation.views import (
    SegmentModelAdmin, SegmentModelDeleteView)


@pytest.mark.django_db
def test_segment_user_data_view_requires_admin_access(site, client, django_user_model):
    user = django_user_model.objects.create(username='first')

    segment = Segment(type=Segment.TYPE_STATIC, count=1)
    segment.save()

    client.force_login(user)
    url = reverse('segment:segment_user_data', args=(segment.id,))
    response = client.get(url)
    assert response.status_code == 302
    assert response.url == '/admin/login/?next=%s' % url


@pytest.mark.django_db
def test_segment_user_data_view(site, client, mocker, django_user_model):
    user1 = django_user_model.objects.create(username='first')
    user2 = django_user_model.objects.create(username='second')
    admin_user = django_user_model.objects.create(
        username='admin', is_superuser=True)

    segment = Segment(type=Segment.TYPE_STATIC, count=1)
    segment.save()
    segment.static_users.add(user1)
    segment.static_users.add(user2)

    rule1 = VisitCountRule(counted_page=site.root_page, segment=segment)
    rule2 = VisitCountRule(counted_page=site.root_page.get_last_child(), segment=segment)
    rule1.save()
    rule2.save()

    mocker.patch(
        'wagtail_personalisation.rules.VisitCountRule.get_user_info_string',
        side_effect=[3, 9, 0, 1])

    client.force_login(admin_user)
    response = client.get(
        reverse('segment:segment_user_data', args=(segment.id,)))
    assert response.status_code == 200

    data_lines = response.content.decode().split("\n")
    assert data_lines[0] == 'Username,Visit count - Test page,Visit count - Regular page\r'
    assert data_lines[1] == 'first,3,9\r'
    assert data_lines[2] == 'second,0,1\r'


@pytest.mark.django_db
def test_segment_delete_view_delete_instance(rf, segmented_page, user):
    user.is_superuser = True
    user.save()

    segment = segmented_page.personalisation_metadata.segment
    canonical_page = segmented_page.personalisation_metadata.canonical_page
    variants_metadata = segment.get_used_pages()
    page_variants = Page.objects.filter(pk__in=(
        variants_metadata.values_list('variant_id', flat=True)
    ))

    # Make sure all canonical page, variants and variants metadata exist
    assert canonical_page
    assert page_variants
    assert variants_metadata

    # Delete the segment via the method on the view.
    request = rf.get('/'.format(segment.pk))
    request.user = user
    view = SegmentModelDeleteView(
        instance_pk=str(segment.pk),
        model_admin=SegmentModelAdmin()
    )
    view.request = request
    view.delete_instance()

    # Segment has been deleted.
    with pytest.raises(segment.DoesNotExist):
        segment.refresh_from_db()

    # Canonical page stayed intact.
    canonical_page.refresh_from_db()

    # Variant pages and their metadata have been deleted.
    assert not page_variants.all()
    assert not variants_metadata.all()


@pytest.mark.django_db
def test_segment_delete_view_raises_permission_denied(rf, segmented_page, user):
    segment = segmented_page.personalisation_metadata.segment

    request = rf.get('/'.format(segment.pk))
    request.user = user
    view = SegmentModelDeleteView(
        instance_pk=str(segment.pk),
        model_admin=SegmentModelAdmin()
    )
    view.request = request

    message = 'User have no permission to delete variant page objects.'
    with pytest.raises(PermissionDenied):
        view.delete_instance()
34.09009
91
0.732558
import pytest from django.core.exceptions import PermissionDenied from django.urls import reverse from wagtail.core.models import Page from wagtail_personalisation.models import Segment from wagtail_personalisation.rules import VisitCountRule from wagtail_personalisation.views import ( SegmentModelAdmin, SegmentModelDeleteView) @pytest.mark.django_db def test_segment_user_data_view_requires_admin_access(site, client, django_user_model): user = django_user_model.objects.create(username='first') segment = Segment(type=Segment.TYPE_STATIC, count=1) segment.save() client.force_login(user) url = reverse('segment:segment_user_data', args=(segment.id,)) response = client.get(url) assert response.status_code == 302 assert response.url == '/admin/login/?next=%s' % url @pytest.mark.django_db def test_segment_user_data_view(site, client, mocker, django_user_model): user1 = django_user_model.objects.create(username='first') user2 = django_user_model.objects.create(username='second') admin_user = django_user_model.objects.create( username='admin', is_superuser=True) segment = Segment(type=Segment.TYPE_STATIC, count=1) segment.save() segment.static_users.add(user1) segment.static_users.add(user2) rule1 = VisitCountRule(counted_page=site.root_page, segment=segment) rule2 = VisitCountRule(counted_page=site.root_page.get_last_child(), segment=segment) rule1.save() rule2.save() mocker.patch('wagtail_personalisation.rules.VisitCountRule.get_user_info_string', side_effect=[3, 9, 0, 1]) client.force_login(admin_user) response = client.get( reverse('segment:segment_user_data', args=(segment.id,))) assert response.status_code == 200 data_lines = response.content.decode().split("\n") assert data_lines[0] == 'Username,Visit count - Test page,Visit count - Regular page\r' assert data_lines[1] == 'first,3,9\r' assert data_lines[2] == 'second,0,1\r' @pytest.mark.django_db def test_segment_delete_view_delete_instance(rf, segmented_page, user): user.is_superuser = True user.save() segment = segmented_page.personalisation_metadata.segment canonical_page = segmented_page.personalisation_metadata.canonical_page variants_metadata = segment.get_used_pages() page_variants = Page.objects.filter(pk__in=( variants_metadata.values_list('variant_id', flat=True) )) assert canonical_page assert page_variants assert variants_metadata request = rf.get('/'.format(segment.pk)) request.user = user view = SegmentModelDeleteView( instance_pk=str(segment.pk), model_admin=SegmentModelAdmin() ) view.request = request view.delete_instance() with pytest.raises(segment.DoesNotExist): segment.refresh_from_db() canonical_page.refresh_from_db() assert not page_variants.all() assert not variants_metadata.all() @pytest.mark.django_db def test_segment_delete_view_raises_permission_denied(rf, segmented_page, user): segment = segmented_page.personalisation_metadata.segment request = rf.get('/'.format(segment.pk)) request.user = user view = SegmentModelDeleteView( instance_pk=str(segment.pk), model_admin=SegmentModelAdmin() ) view.request = request message = 'User have no permission to delete variant page objects.' with pytest.raises(PermissionDenied): view.delete_instance()
true
true
f701181c6eef70d93952ea9363a6ca7cc7ec5a6b
892
py
Python
sa/profiles/HP/1910/get_chassis_id.py
xUndero/noc
9fb34627721149fcf7064860bd63887e38849131
[ "BSD-3-Clause" ]
1
2019-09-20T09:36:48.000Z
2019-09-20T09:36:48.000Z
sa/profiles/HP/1910/get_chassis_id.py
ewwwcha/noc
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
[ "BSD-3-Clause" ]
null
null
null
sa/profiles/HP/1910/get_chassis_id.py
ewwwcha/noc
aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# HP.1910.get_chassis_id
# ---------------------------------------------------------------------
# Copyright (C) 2007-2018 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------

# Python modules
import re

# NOC modules
from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetchassisid import IGetChassisID


class Script(BaseScript):
    name = "HP.1910.get_chassis_id"
    interface = IGetChassisID
    cache = True

    rx_mac = re.compile(r"^MAC_ADDRESS\s+:\s+(?P<mac>\S+)$", re.MULTILINE)

    def execute_cli(self):
        v = self.cli("display device manuinfo", cached=True)
        match = self.rx_mac.search(v)
        mac = match.group("mac")
        return {"first_chassis_mac": mac, "last_chassis_mac": mac}
30.758621
74
0.516816
import re

from noc.core.script.base import BaseScript
from noc.sa.interfaces.igetchassisid import IGetChassisID


class Script(BaseScript):
    name = "HP.1910.get_chassis_id"
    interface = IGetChassisID
    cache = True

    rx_mac = re.compile(r"^MAC_ADDRESS\s+:\s+(?P<mac>\S+)$", re.MULTILINE)

    def execute_cli(self):
        v = self.cli("display device manuinfo", cached=True)
        match = self.rx_mac.search(v)
        mac = match.group("mac")
        return {"first_chassis_mac": mac, "last_chassis_mac": mac}
true
true
f701185265bce3cd160efe75f4ec708cd0600c87
1,661
py
Python
day-14/part-2/skasch.py
lypnol/adventofcode-2021
8ba277d698e8c59ca9cd554acc135473f5964b87
[ "MIT" ]
6
2021-11-29T15:32:27.000Z
2021-12-10T12:24:26.000Z
day-14/part-2/skasch.py
lypnol/adventofcode-2021
8ba277d698e8c59ca9cd554acc135473f5964b87
[ "MIT" ]
9
2021-11-29T15:38:04.000Z
2021-12-13T14:54:16.000Z
day-14/part-2/skasch.py
lypnol/adventofcode-2021
8ba277d698e8c59ca9cd554acc135473f5964b87
[ "MIT" ]
3
2021-12-02T19:11:44.000Z
2021-12-22T20:52:47.000Z
import collections
import functools
from typing import Dict, List, Tuple, Counter

from tool.runners.python import SubmissionPy


def parse(s: str) -> Tuple[List[str], Dict[Tuple[str, str], str]]:
    lines = s.splitlines()
    initial = list(lines[0].strip())
    mapping = {}
    for line in lines[2:]:
        if stripped_line := line.strip():
            left, right = stripped_line.split(" -> ", 1)
            mapping[left[0], left[1]] = right
    return initial, mapping


DEPTH = 40


class SkaschSubmission(SubmissionPy):
    @functools.lru_cache(None)
    def dfs(self, left: str, right: str, depth: int) -> Counter[str]:
        if depth == DEPTH:
            return collections.Counter()
        mid = self.mapping[left, right]
        cnt = collections.Counter(mid)
        return cnt + self.dfs(left, mid, depth + 1) + self.dfs(mid, right, depth + 1)

    def run(self, s: str) -> int:
        """
        :param s: input in string format
        :return: solution flag
        """
        # Your code goes here
        self.dfs.cache_clear()
        initial, self.mapping = parse(s)
        cnt = collections.Counter(initial)
        for left, right in zip(initial, initial[1:]):
            cnt += self.dfs(left, right, 0)
        return max(cnt.values()) - min(cnt.values())


def test_skasch() -> None:
    """
    Run `python -m pytest ./day-14/part-2/skasch.py` to test the submission.
    """
    assert (
        SkaschSubmission().run(
            """
NNCB

CH -> B
HH -> N
CB -> H
NH -> C
HB -> C
HC -> B
HN -> C
NN -> C
BH -> H
NC -> B
NB -> B
BN -> B
BB -> N
BC -> B
CC -> N
CN -> C
""".strip()
        )
        == 2188189693529
    )
22.753425
85
0.562914
import collections import functools from typing import Dict, List, Tuple, Counter from tool.runners.python import SubmissionPy def parse(s: str) -> Tuple[List[str], Dict[Tuple[str, str], str]]: lines = s.splitlines() initial = list(lines[0].strip()) mapping = {} for line in lines[2:]: if stripped_line := line.strip(): left, right = stripped_line.split(" -> ", 1) mapping[left[0], left[1]] = right return initial, mapping DEPTH = 40 class SkaschSubmission(SubmissionPy): @functools.lru_cache(None) def dfs(self, left: str, right: str, depth: int) -> Counter[str]: if depth == DEPTH: return collections.Counter() mid = self.mapping[left, right] cnt = collections.Counter(mid) return cnt + self.dfs(left, mid, depth + 1) + self.dfs(mid, right, depth + 1) def run(self, s: str) -> int: self.dfs.cache_clear() initial, self.mapping = parse(s) cnt = collections.Counter(initial) for left, right in zip(initial, initial[1:]): cnt += self.dfs(left, right, 0) return max(cnt.values()) - min(cnt.values()) def test_skasch() -> None: assert ( SkaschSubmission().run( """ NNCB CH -> B HH -> N CB -> H NH -> C HB -> C HC -> B HN -> C NN -> C BH -> H NC -> B NB -> B BN -> B BB -> N BC -> B CC -> N CN -> C """.strip() ) == 2188189693529 )
true
true
f70118caf8da746f26ed1648131b1be816543fa5
575
py
Python
alembic_dvc/versions/7fb7364b821a_add_username_to_loadtbl.py
ASUPychron/pychron
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
[ "Apache-2.0" ]
31
2016-03-07T02:38:17.000Z
2022-02-14T18:23:43.000Z
alembic_dvc/versions/7fb7364b821a_add_username_to_loadtbl.py
ASUPychron/pychron
dfe551bdeb4ff8b8ba5cdea0edab336025e8cc76
[ "Apache-2.0" ]
1,626
2015-01-07T04:52:35.000Z
2022-03-25T19:15:59.000Z
alembic_dvc/versions/7fb7364b821a_add_username_to_loadtbl.py
UIllinoisHALPychron/pychron
f21b79f4592a9fb9dc9a4cb2e4e943a3885ededc
[ "Apache-2.0" ]
26
2015-05-23T00:10:06.000Z
2022-03-07T16:51:57.000Z
"""add username to LoadTbl Revision ID: 7fb7364b821a Revises: 090128c02529 Create Date: 2018-10-24 17:10:03.781293 """ # revision identifiers, used by Alembic. revision = '7fb7364b821a' down_revision = '090128c02529' import sqlalchemy as sa from alembic import op def upgrade(): op.add_column('LoadTbl', sa.Column('username', sa.String(45))) op.create_foreign_key('LoadTbl_ibfk_2', 'LoadTbl', 'UserTbl', ['username'], ['name']) def downgrade(): op.drop_constraint('LoadTbl_ibfk_2', 'LoadTbl', type_='foreignkey') op.drop_column('LoadTbl', 'username')
23
89
0.721739
revision = '7fb7364b821a'
down_revision = '090128c02529'

import sqlalchemy as sa
from alembic import op


def upgrade():
    op.add_column('LoadTbl', sa.Column('username', sa.String(45)))
    op.create_foreign_key('LoadTbl_ibfk_2', 'LoadTbl', 'UserTbl', ['username'], ['name'])


def downgrade():
    op.drop_constraint('LoadTbl_ibfk_2', 'LoadTbl', type_='foreignkey')
    op.drop_column('LoadTbl', 'username')
true
true
f701193abbbd8212633c24070a0c820f58c3a73c
877
py
Python
test/run/t519.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
2,671
2015-01-03T08:23:25.000Z
2022-03-31T06:15:48.000Z
test/run/t519.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
972
2015-01-05T08:11:00.000Z
2022-03-29T13:47:15.000Z
test/run/t519.py
timmartin/skulpt
2e3a3fbbaccc12baa29094a717ceec491a8a6750
[ "MIT" ]
845
2015-01-03T19:53:36.000Z
2022-03-29T18:34:22.000Z
l = ['h','e','l','l','o']
print (l.index('l'))
print (l.index('l', 2))
print (l.index('l', 3))
print (l.index('l', 2, 3))
print (l.index('l', 3, 4))
print (l.index('l', 2, -1))
print (l.index('l', 2, -2))
print (l.index('l', 3, -1))

try:
    print (l.index('l', 4))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', -1))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', 2, 2))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', 3, 2))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', 3, -2))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', 3, 0))
except ValueError as e:
    print (repr(e))

try:
    print (l.index('l', 4.3))
except TypeError as e:
    print (repr(e))

try:
    print (l.index('l', 3, 0.6))
except TypeError as e:
    print (repr(e))
17.196078
32
0.54618
l = ['h','e','l','l','o'] print (l.index('l')) print (l.index('l', 2)) print (l.index('l', 3)) print (l.index('l', 2, 3)) print (l.index('l', 3, 4)) print (l.index('l', 2, -1)) print (l.index('l', 2, -2)) print (l.index('l', 3, -1)) try: print (l.index('l', 4)) except ValueError as e: print (repr(e)) try: print (l.index('l', -1)) except ValueError as e: print (repr(e)) try: print (l.index('l', 2, 2)) except ValueError as e: print (repr(e)) try: print (l.index('l', 3, 2)) except ValueError as e: print (repr(e)) try: print (l.index('l', 3, -2)) except ValueError as e: print (repr(e)) try: print (l.index('l', 3, 0)) except ValueError as e: print (repr(e)) try: print (l.index('l', 4.3)) except TypeError as e: print (repr(e)) try: print (l.index('l', 3, 0.6)) except TypeError as e: print (repr(e))
true
true
f7011a28257c086b1275554dce2bc8bf4e03d72f
3,972
py
Python
src/genie/libs/parser/ios/c7600/tests/ShowModule/cli/equal/golden_output_expected.py
danielgraziano/genieparser
74d5e1ded9794561af1ac3284307c58365617673
[ "Apache-2.0" ]
4
2020-08-20T12:23:12.000Z
2021-06-15T14:10:02.000Z
src/genie/libs/parser/ios/c7600/tests/ShowModule/cli/equal/golden_output_expected.py
dalwar23/genieparser
a9df45d3ee23f107bfb55915068e90782f92fc99
[ "Apache-2.0" ]
119
2020-07-10T22:37:51.000Z
2021-03-18T02:40:05.000Z
src/genie/libs/parser/ios/c7600/tests/ShowModule/cli/equal/golden_output_expected.py
dalwar23/genieparser
a9df45d3ee23f107bfb55915068e90782f92fc99
[ "Apache-2.0" ]
2
2021-10-18T20:13:11.000Z
2022-02-07T06:05:28.000Z
expected_output = { 'slot': { '1': { 'lc': { 'card_type': 'CEF720 48 port 10/100/1000mb Ethernet', 'fw_ver': '12.2(14r)S', 'hw_ver': '2.7', 'mac_address_from': '001e.4aff.ee89', 'mac_address_to': '001e.4aff.eeb8', 'model': 'WS-X6748-GE-TX', 'online_diag_status': 'Pass', 'ports': 48, 'serial_number': 'SAL1209HMW3', 'status': 'Ok', 'subslot': { 'WS-F6700-CFC': { 'hw_ver': '4.0', 'model': 'WS-F6700-CFC', 'serial_number': 'SAL1207G5V1', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, '2': { 'lc': { 'card_type': '2 port adapter Enhanced FlexWAN', 'fw_ver': '15.4(0.10)S', 'hw_ver': '2.1', 'mac_address_from': '0015.2bff.e884', 'mac_address_to': '0015.2bff.e8c3', 'model': 'WS-X6582-2PA', 'online_diag_status': 'Pass', 'ports': 0, 'serial_number': 'JAE0939LYNQ', 'status': 'Ok', 'sw_ver': '15.4(0.10)S', }, }, '5': { 'rp': { 'card_type': 'Supervisor Engine 720 (Hot)', 'fw_ver': '8.1(3', 'hw_ver': '4.1', 'mac_address_from': '0011.21ff.441a', 'mac_address_to': '0011.21ff.441d', 'model': 'WS-SUP720-3BXL', 'online_diag_status': 'Pass', 'ports': 2, 'serial_number': 'SAD09020BF8', 'status': 'Ok', 'subslot': { 'WS-F6K-PFC3BXL': { 'hw_ver': '1.4', 'model': 'WS-F6K-PFC3BXL', 'serial_number': 'SAD090301K6', 'status': 'Ok', }, 'WS-SUP720': { 'hw_ver': '2.2', 'model': 'WS-SUP720', 'serial_number': 'SAD090105M6', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, '6': { 'rp': { 'card_type': 'Supervisor Engine 720 (Active)', 'fw_ver': '8.5(4', 'hw_ver': '5.12', 'mac_address_from': '0022.55ff.039b', 'mac_address_to': '0022.55ff.039e', 'model': 'WS-SUP720-3BXL', 'online_diag_status': 'Pass', 'ports': 2, 'serial_number': 'SAL15129MRC', 'status': 'Ok', 'subslot': { 'WS-F6K-PFC3BXL': { 'hw_ver': '1.11', 'model': 'WS-F6K-PFC3BXL', 'serial_number': 'SAL15129KW4', 'status': 'Ok', }, 'WS-SUP720': { 'hw_ver': '5.1', 'model': 'WS-SUP720', 'serial_number': 'SAL15045PYS', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, }, }
39.326733
73
0.293807
expected_output = { 'slot': { '1': { 'lc': { 'card_type': 'CEF720 48 port 10/100/1000mb Ethernet', 'fw_ver': '12.2(14r)S', 'hw_ver': '2.7', 'mac_address_from': '001e.4aff.ee89', 'mac_address_to': '001e.4aff.eeb8', 'model': 'WS-X6748-GE-TX', 'online_diag_status': 'Pass', 'ports': 48, 'serial_number': 'SAL1209HMW3', 'status': 'Ok', 'subslot': { 'WS-F6700-CFC': { 'hw_ver': '4.0', 'model': 'WS-F6700-CFC', 'serial_number': 'SAL1207G5V1', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, '2': { 'lc': { 'card_type': '2 port adapter Enhanced FlexWAN', 'fw_ver': '15.4(0.10)S', 'hw_ver': '2.1', 'mac_address_from': '0015.2bff.e884', 'mac_address_to': '0015.2bff.e8c3', 'model': 'WS-X6582-2PA', 'online_diag_status': 'Pass', 'ports': 0, 'serial_number': 'JAE0939LYNQ', 'status': 'Ok', 'sw_ver': '15.4(0.10)S', }, }, '5': { 'rp': { 'card_type': 'Supervisor Engine 720 (Hot)', 'fw_ver': '8.1(3', 'hw_ver': '4.1', 'mac_address_from': '0011.21ff.441a', 'mac_address_to': '0011.21ff.441d', 'model': 'WS-SUP720-3BXL', 'online_diag_status': 'Pass', 'ports': 2, 'serial_number': 'SAD09020BF8', 'status': 'Ok', 'subslot': { 'WS-F6K-PFC3BXL': { 'hw_ver': '1.4', 'model': 'WS-F6K-PFC3BXL', 'serial_number': 'SAD090301K6', 'status': 'Ok', }, 'WS-SUP720': { 'hw_ver': '2.2', 'model': 'WS-SUP720', 'serial_number': 'SAD090105M6', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, '6': { 'rp': { 'card_type': 'Supervisor Engine 720 (Active)', 'fw_ver': '8.5(4', 'hw_ver': '5.12', 'mac_address_from': '0022.55ff.039b', 'mac_address_to': '0022.55ff.039e', 'model': 'WS-SUP720-3BXL', 'online_diag_status': 'Pass', 'ports': 2, 'serial_number': 'SAL15129MRC', 'status': 'Ok', 'subslot': { 'WS-F6K-PFC3BXL': { 'hw_ver': '1.11', 'model': 'WS-F6K-PFC3BXL', 'serial_number': 'SAL15129KW4', 'status': 'Ok', }, 'WS-SUP720': { 'hw_ver': '5.1', 'model': 'WS-SUP720', 'serial_number': 'SAL15045PYS', 'status': 'Ok', }, }, 'sw_ver': '15.4(0.10)', }, }, }, }
true
true
f7011ccadfb0c0ea55e55f9439fd1c6e5b3146e0
2,473
py
Python
pbutils/test_helpers.py
phonybone/phonybone_utils
d95f226ddfc62a1d69b5ff6f53de86188fe0c8f9
[ "MIT" ]
null
null
null
pbutils/test_helpers.py
phonybone/phonybone_utils
d95f226ddfc62a1d69b5ff6f53de86188fe0c8f9
[ "MIT" ]
null
null
null
pbutils/test_helpers.py
phonybone/phonybone_utils
d95f226ddfc62a1d69b5ff6f53de86188fe0c8f9
[ "MIT" ]
null
null
null
import unittest
import importlib
from pbutils.streams import warn

# Try to import flask settings module:
settings = None
try:
    pkg_root = __name__.split('.')[0]
    settings_modname = '{}.settings'.format(pkg_root)
    settings = importlib.import_module(settings_modname)
except ImportError as e:
    warn('Unable to import {}: {}'.format(settings_modname, str(e)))


class BaseTest(unittest.TestCase):
    if settings is not None:
        base_url = 'http://{}'.format(settings.FLASK_SERVER_NAME)
    else:
        base_url = 'http://localhost:5000'

    def setUp(self):
        self.client = app.test_client()
        self.client.testing = True
        try:
            self.reset_fixture()
        except AttributeError as e:
            if str(e) == 'reset_fixture':
                print('{} has no method "reset_fixture()", skipping'.format(self.__class__))
            else:
                raise

    def make_url(cls, url):
        return cls.base_url + url

    def _test_status(self, url, method, data, status_code, content_type):
        '''
        issue a <method> request on url and verify the expected status_code was found.
        return resp.json()
        '''
        real_url = self.make_url(url)
        req = getattr(self.client, method.lower())
        args = {'follow_redirects': True}  # not needed for this site, but...
        if data:
            if content_type == 'application/json':
                args['data'] = json.dumps(data)
            elif content_type == 'application/x-www-form-urlencoded':
                args['data'] = data
            args['content_type'] = content_type

        resp = req(real_url, **args)
        self.assertEqual(resp.status_code, status_code)
        try:
            return json.loads(str(resp.data.decode()))
        except (TypeError, ValueError):
            return resp.data.decode()

    def _test_get_status(self, url, status_code=200):
        return self._test_status(url, 'GET', None, status_code, None)

    def _test_post_status(self, url, data, status_code=201, content_type='application/json'):
        return self._test_status(url, 'POST', data, status_code, content_type)

    def _test_put_status(self, url, data, status_code=204, content_type='application/json'):
        return self._test_status(url, 'PUT', data, status_code, content_type)

    def _test_delete_status(self, url, status_code=204):
        return self._test_status(url, 'DELETE', None, status_code, None)
37.469697
113
0.631217
import unittest import importlib from pbutils.streams import warn settings = None try: pkg_root = __name__.split('.')[0] settings_modname = '{}.settings'.format(pkg_root) settings = importlib.import_module(settings_modname) except ImportError as e: warn('Unable to import {}: {}'.format(settings_modname, str(e))) class BaseTest(unittest.TestCase): if settings is not None: base_url = 'http://{}'.format(settings.FLASK_SERVER_NAME) else: base_url = 'http://localhost:5000' def setUp(self): self.client = app.test_client() self.client.testing = True try: self.reset_fixture() except AttributeError as e: if str(e) == 'reset_fixture': print('{} has no method "reset_fixture()", skipping'.format(self.__class__)) else: raise def make_url(cls, url): return cls.base_url + url def _test_status(self, url, method, data, status_code, content_type): real_url = self.make_url(url) req = getattr(self.client, method.lower()) args = {'follow_redirects': True} if data: if content_type == 'application/json': args['data'] = json.dumps(data) elif content_type == 'application/x-www-form-urlencoded': args['data'] = data args['content_type'] = content_type resp = req(real_url, **args) self.assertEqual(resp.status_code, status_code) try: return json.loads(str(resp.data.decode())) except (TypeError, ValueError): return resp.data.decode() def _test_get_status(self, url, status_code=200): return self._test_status(url, 'GET', None, status_code, None) def _test_post_status(self, url, data, status_code=201, content_type='application/json'): return self._test_status(url, 'POST', data, status_code, content_type) def _test_put_status(self, url, data, status_code=204, content_type='application/json'): return self._test_status(url, 'PUT', data, status_code, content_type) def _test_delete_status(self, url, status_code=204): return self._test_status(url, 'DELETE', None, status_code, None)
true
true
f7011cec1969964cb938511820c7656d516bea92
22,444
py
Python
azure-mgmt-batch/tests/test_mgmt_batch.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
1
2021-09-07T18:36:04.000Z
2021-09-07T18:36:04.000Z
azure-mgmt-batch/tests/test_mgmt_batch.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
2
2019-10-02T23:37:38.000Z
2020-10-02T01:17:31.000Z
azure-mgmt-batch/tests/test_mgmt_batch.py
JonathanGailliez/azure-sdk-for-python
f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b
[ "MIT" ]
1
2019-06-17T22:18:23.000Z
2019-06-17T22:18:23.000Z
# coding: utf-8 #------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. #-------------------------------------------------------------------------- import io import logging import time import unittest import requests import azure.mgmt.batch from azure.mgmt.batch import models from azure.common.exceptions import CloudError from mgmt_batch_preparers import KeyVaultPreparer, SimpleBatchPreparer from devtools_testutils import ( AzureMgmtTestCase, ResourceGroupPreparer, StorageAccountPreparer ) AZURE_LOCATION = 'westcentralus' EXISTING_BATCH_ACCOUNT = {'name': 'sdktest2', 'location': 'westcentralus'} class MgmtBatchTest(AzureMgmtTestCase): def setUp(self): super(MgmtBatchTest, self).setUp() self.mgmt_batch_client = self.create_mgmt_client( azure.mgmt.batch.BatchManagementClient) self.mgmt_keyvault_client = self.create_mgmt_client( azure.mgmt.keyvault.KeyVaultManagementClient) def _get_account_name(self): return self.get_resource_name('batch')[-24:] def test_mgmt_batch_list_operations(self): operations = self.mgmt_batch_client.operations.list() all_ops = list(operations) self.assertEqual(len(all_ops), 35) self.assertEqual(all_ops[0].name, 'Microsoft.Batch/batchAccounts/providers/Microsoft.Insights/diagnosticSettings/read') self.assertEqual(all_ops[0].origin, 'system') self.assertEqual(all_ops[0].display.provider, 'Microsoft Batch') self.assertEqual(all_ops[0].display.operation, 'Read diagnostic setting') def test_mgmt_batch_subscription_quota(self): quotas = self.mgmt_batch_client.location.get_quotas(AZURE_LOCATION) self.assertIsInstance(quotas, models.BatchLocationQuota) self.assertEqual(quotas.account_quota, 3) def test_mgmt_batch_account_name(self): # Test Invalid Account Name availability = self.mgmt_batch_client.location.check_name_availability( AZURE_LOCATION, "randombatchaccount@5^$g9873495873") self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertFalse(availability.name_available) self.assertEqual(availability.reason, models.NameAvailabilityReason.invalid) # Test Unvailable Account Name availability = self.mgmt_batch_client.location.check_name_availability( EXISTING_BATCH_ACCOUNT['location'], EXISTING_BATCH_ACCOUNT['name']) self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertFalse(availability.name_available) self.assertEqual(availability.reason, models.NameAvailabilityReason.already_exists) # Test Available Account Name availability = self.mgmt_batch_client.location.check_name_availability( AZURE_LOCATION, self._get_account_name()) self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertTrue(availability.name_available) @ResourceGroupPreparer(location=AZURE_LOCATION) @KeyVaultPreparer(location=AZURE_LOCATION) def test_mgmt_batch_byos_account(self, resource_group, location, keyvault): if self.is_live: keyvault = keyvault.result() batch_account = models.BatchAccountCreateParameters( location=location, pool_allocation_mode=models.PoolAllocationMode.user_subscription) with self.assertRaises(Exception): # TODO: What exception creating = self.mgmt_batch_client.batch_account.create( resource_group.name, self._get_account_name(), batch_account) creating.result() keyvault_id = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.KeyVault/vaults/{}".format( self.settings.SUBSCRIPTION_ID, resource_group.name, keyvault.name) 
keyvault_url = "https://{}.vault.azure.net/".format(keyvault.name) batch_account = models.BatchAccountCreateParameters( location=location, pool_allocation_mode=models.PoolAllocationMode.user_subscription, key_vault_reference={'id': keyvault_id, 'url': keyvault_url}) creating = self.mgmt_batch_client.batch_account.create( resource_group.name, self._get_account_name(), batch_account) creating.result() @ResourceGroupPreparer(location=AZURE_LOCATION) def test_mgmt_batch_account(self, resource_group, location): batch_account = models.BatchAccountCreateParameters( location=location, ) account_name = self._get_account_name() account_setup = self.mgmt_batch_client.batch_account.create( resource_group.name, account_name, batch_account) account_setup.result() # Test Get Account account = self.mgmt_batch_client.batch_account.get(resource_group.name, account_name) self.assertEqual(account.dedicated_core_quota, 20) self.assertEqual(account.low_priority_core_quota, 100) self.assertEqual(account.pool_quota, 100) self.assertEqual(account.pool_allocation_mode.value, 'BatchService') # Test List Accounts by Resource Group accounts = self.mgmt_batch_client.batch_account.list_by_resource_group(resource_group.name) self.assertEqual(len(list(accounts)), 1) # Test List Account Keys keys = self.mgmt_batch_client.batch_account.get_keys(resource_group.name, account_name) self.assertIsInstance(keys, models.BatchAccountKeys) self.assertEqual(keys.account_name, account_name) secondary = keys.secondary # Test Regenerate Account Key keys = self.mgmt_batch_client.batch_account.regenerate_key( resource_group.name, account_name, 'Secondary') self.assertIsInstance(keys, models.BatchAccountKeys) self.assertFalse(keys.secondary == secondary) # Test Update Account update_tags = {'Name': 'tagName', 'Value': 'tagValue'} updated = self.mgmt_batch_client.batch_account.update(resource_group.name, account_name, update_tags) self.assertIsInstance(updated, models.BatchAccount) self.assertEqual(updated.tags['Name'], 'tagName') self.assertEqual(updated.tags['Value'], 'tagValue') # Test Delete Account response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @StorageAccountPreparer(name_prefix='batch', location=AZURE_LOCATION) def test_mgmt_batch_applications(self, resource_group, location, storage_account, storage_account_key): # Test Create Account with Auto-Storage storage_resource = '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}'.format( self.settings.SUBSCRIPTION_ID, resource_group.name, storage_account.name ) batch_account = models.BatchAccountCreateParameters( location=location, auto_storage=models.AutoStorageBaseProperties(storage_resource) ) account_name = self._get_account_name() account_setup = self.mgmt_batch_client.batch_account.create( resource_group.name, account_name, batch_account) account_setup.result() # Test Sync AutoStorage Keys response = self.mgmt_batch_client.batch_account.synchronize_auto_storage_keys( resource_group.name, account_name) self.assertIsNone(response) # Test Add Application application_id = 'my_application_id' application_name = 'my_application_name' application_ver = 'v1.0' application_properties = models.Application(display_name=application_name, allow_updates=True) application = self.mgmt_batch_client.application.create( resource_group.name, account_name, application_id, parameters=application_properties) self.assertIsInstance(application, 
models.Application) self.assertTrue(application_id in application.id) self.assertTrue(application_name in application.display_name) self.assertTrue(application.allow_updates) # Test Mgmt Get Application application = self.mgmt_batch_client.application.get(resource_group.name, account_name, application_id) self.assertIsInstance(application, models.Application) self.assertTrue(application_id in application.id) self.assertTrue(application_name in application.display_name) self.assertTrue(application.allow_updates) # Test Mgmt List Applications applications = self.mgmt_batch_client.application.list(resource_group.name, account_name) self.assertTrue(len(list(applications)) > 0) # Test Add Application Package package_ref = self.mgmt_batch_client.application_package.create( resource_group.name, account_name, application_id, application_ver) self.assertIsInstance(package_ref, models.ApplicationPackage) with io.BytesIO(b'Hello World') as f: headers = {'x-ms-blob-type': 'BlockBlob'} upload = requests.put(package_ref.storage_url, headers=headers, data=f.read()) if not upload: raise ValueError('Upload failed: {!r}'.format(upload)) # Test Activate Application Package response = self.mgmt_batch_client.application_package.activate( resource_group.name, account_name, application_id, application_ver, 'zip') self.assertTrue(response.state == models.PackageState.active) # Test Update Application params = models.Application( allow_updates=False, display_name='my_updated_name', default_version=application_ver ) response = self.mgmt_batch_client.application.update( resource_group.name, account_name, application_id, params) self.assertTrue(application_ver in response.default_version) self.assertTrue('my_updated_name' in response.display_name) self.assertFalse(response.allow_updates) # Test Get Application Package package_ref = self.mgmt_batch_client.application_package.get( resource_group.name, account_name, application_id, application_ver) self.assertIsInstance(package_ref, models.ApplicationPackage) self.assertTrue(application_id in package_ref.id) self.assertEqual(package_ref.format, 'zip') self.assertEqual(package_ref.state, models.PackageState.active) # Test Delete Application Package response = self.mgmt_batch_client.application_package.delete( resource_group.name, account_name, application_id, application_ver) self.assertIsNone(response) # Test Delete Application response = self.mgmt_batch_client.application.delete( resource_group.name, account_name, application_id) self.assertIsNone(response) # Test Delete Account response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @SimpleBatchPreparer(location=AZURE_LOCATION) def test_mgmt_batch_certificates(self, resource_group, location, batch_account): # Test Add Certificate parameters = models.CertificateCreateOrUpdateParameters( thumbprint='cff2ab63c8c955aaf71989efa641b906558d9fb7', thumbprint_algorithm='sha1', 
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=', format=models.CertificateFormat.pfx, password='nodesdk') certificate = 'SHA1-cff2ab63c8c955aaf71989efa641b906558d9fb7' response = self.mgmt_batch_client.certificate.create(resource_group.name, batch_account.name, certificate, parameters) self.assertIsInstance(response.result(), models.Certificate) # Test List Certificates certs = self.mgmt_batch_client.certificate.list_by_batch_account(resource_group.name, batch_account.name) self.assertEqual(len(list(certs)), 1) # Test Get Certificate cert = self.mgmt_batch_client.certificate.get(resource_group.name, batch_account.name, certificate) self.assertIsInstance(cert, models.Certificate) self.assertEqual(cert.thumbprint.lower(), 'cff2ab63c8c955aaf71989efa641b906558d9fb7') self.assertEqual(cert.thumbprint_algorithm, 'SHA1') self.assertIsNone(cert.delete_certificate_error) # Test Update Certiciate parameters = models.CertificateCreateOrUpdateParameters( password='nodesdk', 
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=',) response = self.mgmt_batch_client.certificate.update(resource_group.name, batch_account.name, certificate, parameters) self.assertIsInstance(response, models.Certificate) # Test Cancel Certificate Delete #with self.assertRaises(models.DeleteCertificateError): self.mgmt_batch_client.certificate.cancel_deletion( resource_group.name, batch_account.name, certificate) # Test Delete Certificate response = self.mgmt_batch_client.certificate.delete(resource_group.name, batch_account.name, certificate) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @SimpleBatchPreparer(location=AZURE_LOCATION) def test_mgmt_batch_pools(self, resource_group, location, batch_account): # Test create PAAS pool paas_pool = "test_paas_pool" parameters = models.Pool( display_name="test_pool", vm_size='small', deployment_configuration=models.DeploymentConfiguration( cloud_service_configuration=models.CloudServiceConfiguration(os_family='5') ), start_task=models.StartTask( command_line="cmd.exe /c \"echo hello world\"", resource_files=[models.ResourceFile(http_url='https://blobsource.com', file_path='filename.txt')], environment_settings=[models.EnvironmentSetting('ENV_VAR', 'env_value')], user_identity=models.UserIdentity( auto_user=models.AutoUserSpecification( elevation_level=models.ElevationLevel.admin ) ) ), user_accounts=[models.UserAccount('UserName', 'p@55wOrd')], 
scale_settings=models.ScaleSettings( fixed_scale=models.FixedScaleSettings( target_dedicated_nodes=0, target_low_priority_nodes=0 ) ) ) response = self.mgmt_batch_client.pool.create( resource_group.name, batch_account.name, paas_pool, parameters) self.assertIsInstance(response.result(), models.Pool) # Test create IAAS pool iaas_pool = "test_iaas_pool" parameters = models.Pool( display_name="test_pool", vm_size='Standard_A1', deployment_configuration=models.DeploymentConfiguration( virtual_machine_configuration=models.VirtualMachineConfiguration( image_reference=models.ImageReference( publisher='MicrosoftWindowsServer', offer='WindowsServer', sku='2016-Datacenter-smalldisk' ), node_agent_sku_id='batch.node.windows amd64', windows_configuration=models.WindowsConfiguration(True) ) ), scale_settings=models.ScaleSettings( fixed_scale=models.FixedScaleSettings( target_dedicated_nodes=0, target_low_priority_nodes=0 ) ) ) response = self.mgmt_batch_client.pool.create( resource_group.name, batch_account.name, iaas_pool, parameters) self.assertIsInstance(response.result(), models.Pool) # Test list pools pools = self.mgmt_batch_client.pool.list_by_batch_account(resource_group.name, batch_account.name) self.assertEqual(len(list(pools)), 2) # Test Update pool parameters = models.Pool( scale_settings=models.ScaleSettings( auto_scale=models.AutoScaleSettings( formula='$TargetDedicatedNodes=1' ) ) ) response = self.mgmt_batch_client.pool.update( resource_group.name, batch_account.name, iaas_pool, parameters) self.assertIsInstance(response, models.Pool) # Test Get pool pool = self.mgmt_batch_client.pool.get( resource_group.name, batch_account.name, iaas_pool) self.assertIsInstance(pool, models.Pool) self.assertEqual(pool.vm_size, 'STANDARD_A1'), self.assertIsNone(pool.display_name), self.assertEqual(pool.allocation_state, models.AllocationState.resizing) self.assertEqual( pool.deployment_configuration.virtual_machine_configuration.node_agent_sku_id, 'batch.node.windows amd64') # Test stop resizing with self.assertRaises(CloudError): self.mgmt_batch_client.pool.stop_resize(resource_group.name, batch_account.name, iaas_pool) if self.is_live: time.sleep(300) # Test disable auto-scale response = self.mgmt_batch_client.pool.disable_auto_scale( resource_group.name, batch_account.name, iaas_pool) self.assertIsInstance(response, models.Pool) # Test delete pool response = self.mgmt_batch_client.pool.delete( resource_group.name, batch_account.name, iaas_pool) self.assertIsNone(response.result())
57.255102
2,141
0.740866
import io import logging import time import unittest import requests import azure.mgmt.batch from azure.mgmt.batch import models from azure.common.exceptions import CloudError from mgmt_batch_preparers import KeyVaultPreparer, SimpleBatchPreparer from devtools_testutils import ( AzureMgmtTestCase, ResourceGroupPreparer, StorageAccountPreparer ) AZURE_LOCATION = 'westcentralus' EXISTING_BATCH_ACCOUNT = {'name': 'sdktest2', 'location': 'westcentralus'} class MgmtBatchTest(AzureMgmtTestCase): def setUp(self): super(MgmtBatchTest, self).setUp() self.mgmt_batch_client = self.create_mgmt_client( azure.mgmt.batch.BatchManagementClient) self.mgmt_keyvault_client = self.create_mgmt_client( azure.mgmt.keyvault.KeyVaultManagementClient) def _get_account_name(self): return self.get_resource_name('batch')[-24:] def test_mgmt_batch_list_operations(self): operations = self.mgmt_batch_client.operations.list() all_ops = list(operations) self.assertEqual(len(all_ops), 35) self.assertEqual(all_ops[0].name, 'Microsoft.Batch/batchAccounts/providers/Microsoft.Insights/diagnosticSettings/read') self.assertEqual(all_ops[0].origin, 'system') self.assertEqual(all_ops[0].display.provider, 'Microsoft Batch') self.assertEqual(all_ops[0].display.operation, 'Read diagnostic setting') def test_mgmt_batch_subscription_quota(self): quotas = self.mgmt_batch_client.location.get_quotas(AZURE_LOCATION) self.assertIsInstance(quotas, models.BatchLocationQuota) self.assertEqual(quotas.account_quota, 3) def test_mgmt_batch_account_name(self): availability = self.mgmt_batch_client.location.check_name_availability( AZURE_LOCATION, "randombatchaccount@5^$g9873495873") self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertFalse(availability.name_available) self.assertEqual(availability.reason, models.NameAvailabilityReason.invalid) availability = self.mgmt_batch_client.location.check_name_availability( EXISTING_BATCH_ACCOUNT['location'], EXISTING_BATCH_ACCOUNT['name']) self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertFalse(availability.name_available) self.assertEqual(availability.reason, models.NameAvailabilityReason.already_exists) availability = self.mgmt_batch_client.location.check_name_availability( AZURE_LOCATION, self._get_account_name()) self.assertIsInstance(availability, models.CheckNameAvailabilityResult) self.assertTrue(availability.name_available) @ResourceGroupPreparer(location=AZURE_LOCATION) @KeyVaultPreparer(location=AZURE_LOCATION) def test_mgmt_batch_byos_account(self, resource_group, location, keyvault): if self.is_live: keyvault = keyvault.result() batch_account = models.BatchAccountCreateParameters( location=location, pool_allocation_mode=models.PoolAllocationMode.user_subscription) with self.assertRaises(Exception): creating = self.mgmt_batch_client.batch_account.create( resource_group.name, self._get_account_name(), batch_account) creating.result() keyvault_id = "/subscriptions/{}/resourceGroups/{}/providers/Microsoft.KeyVault/vaults/{}".format( self.settings.SUBSCRIPTION_ID, resource_group.name, keyvault.name) keyvault_url = "https://{}.vault.azure.net/".format(keyvault.name) batch_account = models.BatchAccountCreateParameters( location=location, pool_allocation_mode=models.PoolAllocationMode.user_subscription, key_vault_reference={'id': keyvault_id, 'url': keyvault_url}) creating = self.mgmt_batch_client.batch_account.create( resource_group.name, self._get_account_name(), batch_account) creating.result() 
@ResourceGroupPreparer(location=AZURE_LOCATION) def test_mgmt_batch_account(self, resource_group, location): batch_account = models.BatchAccountCreateParameters( location=location, ) account_name = self._get_account_name() account_setup = self.mgmt_batch_client.batch_account.create( resource_group.name, account_name, batch_account) account_setup.result() account = self.mgmt_batch_client.batch_account.get(resource_group.name, account_name) self.assertEqual(account.dedicated_core_quota, 20) self.assertEqual(account.low_priority_core_quota, 100) self.assertEqual(account.pool_quota, 100) self.assertEqual(account.pool_allocation_mode.value, 'BatchService') accounts = self.mgmt_batch_client.batch_account.list_by_resource_group(resource_group.name) self.assertEqual(len(list(accounts)), 1) keys = self.mgmt_batch_client.batch_account.get_keys(resource_group.name, account_name) self.assertIsInstance(keys, models.BatchAccountKeys) self.assertEqual(keys.account_name, account_name) secondary = keys.secondary keys = self.mgmt_batch_client.batch_account.regenerate_key( resource_group.name, account_name, 'Secondary') self.assertIsInstance(keys, models.BatchAccountKeys) self.assertFalse(keys.secondary == secondary) update_tags = {'Name': 'tagName', 'Value': 'tagValue'} updated = self.mgmt_batch_client.batch_account.update(resource_group.name, account_name, update_tags) self.assertIsInstance(updated, models.BatchAccount) self.assertEqual(updated.tags['Name'], 'tagName') self.assertEqual(updated.tags['Value'], 'tagValue') response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @StorageAccountPreparer(name_prefix='batch', location=AZURE_LOCATION) def test_mgmt_batch_applications(self, resource_group, location, storage_account, storage_account_key): storage_resource = '/subscriptions/{}/resourceGroups/{}/providers/Microsoft.Storage/storageAccounts/{}'.format( self.settings.SUBSCRIPTION_ID, resource_group.name, storage_account.name ) batch_account = models.BatchAccountCreateParameters( location=location, auto_storage=models.AutoStorageBaseProperties(storage_resource) ) account_name = self._get_account_name() account_setup = self.mgmt_batch_client.batch_account.create( resource_group.name, account_name, batch_account) account_setup.result() response = self.mgmt_batch_client.batch_account.synchronize_auto_storage_keys( resource_group.name, account_name) self.assertIsNone(response) application_id = 'my_application_id' application_name = 'my_application_name' application_ver = 'v1.0' application_properties = models.Application(display_name=application_name, allow_updates=True) application = self.mgmt_batch_client.application.create( resource_group.name, account_name, application_id, parameters=application_properties) self.assertIsInstance(application, models.Application) self.assertTrue(application_id in application.id) self.assertTrue(application_name in application.display_name) self.assertTrue(application.allow_updates) application = self.mgmt_batch_client.application.get(resource_group.name, account_name, application_id) self.assertIsInstance(application, models.Application) self.assertTrue(application_id in application.id) self.assertTrue(application_name in application.display_name) self.assertTrue(application.allow_updates) applications = self.mgmt_batch_client.application.list(resource_group.name, account_name) self.assertTrue(len(list(applications)) > 0) package_ref = 
self.mgmt_batch_client.application_package.create( resource_group.name, account_name, application_id, application_ver) self.assertIsInstance(package_ref, models.ApplicationPackage) with io.BytesIO(b'Hello World') as f: headers = {'x-ms-blob-type': 'BlockBlob'} upload = requests.put(package_ref.storage_url, headers=headers, data=f.read()) if not upload: raise ValueError('Upload failed: {!r}'.format(upload)) response = self.mgmt_batch_client.application_package.activate( resource_group.name, account_name, application_id, application_ver, 'zip') self.assertTrue(response.state == models.PackageState.active) params = models.Application( allow_updates=False, display_name='my_updated_name', default_version=application_ver ) response = self.mgmt_batch_client.application.update( resource_group.name, account_name, application_id, params) self.assertTrue(application_ver in response.default_version) self.assertTrue('my_updated_name' in response.display_name) self.assertFalse(response.allow_updates) package_ref = self.mgmt_batch_client.application_package.get( resource_group.name, account_name, application_id, application_ver) self.assertIsInstance(package_ref, models.ApplicationPackage) self.assertTrue(application_id in package_ref.id) self.assertEqual(package_ref.format, 'zip') self.assertEqual(package_ref.state, models.PackageState.active) response = self.mgmt_batch_client.application_package.delete( resource_group.name, account_name, application_id, application_ver) self.assertIsNone(response) response = self.mgmt_batch_client.application.delete( resource_group.name, account_name, application_id) self.assertIsNone(response) response = self.mgmt_batch_client.batch_account.delete(resource_group.name, account_name) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @SimpleBatchPreparer(location=AZURE_LOCATION) def test_mgmt_batch_certificates(self, resource_group, location, batch_account): parameters = models.CertificateCreateOrUpdateParameters( thumbprint='cff2ab63c8c955aaf71989efa641b906558d9fb7', thumbprint_algorithm='sha1', 
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=', format=models.CertificateFormat.pfx, password='nodesdk') certificate = 'SHA1-cff2ab63c8c955aaf71989efa641b906558d9fb7' response = self.mgmt_batch_client.certificate.create(resource_group.name, batch_account.name, certificate, parameters) self.assertIsInstance(response.result(), models.Certificate) certs = self.mgmt_batch_client.certificate.list_by_batch_account(resource_group.name, batch_account.name) self.assertEqual(len(list(certs)), 1) cert = self.mgmt_batch_client.certificate.get(resource_group.name, batch_account.name, certificate) self.assertIsInstance(cert, models.Certificate) self.assertEqual(cert.thumbprint.lower(), 'cff2ab63c8c955aaf71989efa641b906558d9fb7') self.assertEqual(cert.thumbprint_algorithm, 'SHA1') self.assertIsNone(cert.delete_certificate_error) parameters = models.CertificateCreateOrUpdateParameters( password='nodesdk', 
data='MIIGMQIBAzCCBe0GCSqGSIb3DQEHAaCCBd4EggXaMIIF1jCCA8AGCSqGSIb3DQEHAaCCA7EEggOtMIIDqTCCA6UGCyqGSIb3DQEMCgECoIICtjCCArIwHAYKKoZIhvcNAQwBAzAOBAhyd3xCtln3iQICB9AEggKQhe5P10V9iV1BsDlwWT561Yu2hVq3JT8ae/ebx1ZR/gMApVereDKkS9Zg4vFyssusHebbK5pDpU8vfAqle0TM4m7wGsRj453ZorSPUfMpHvQnAOn+2pEpWdMThU7xvZ6DVpwhDOQk9166z+KnKdHGuJKh4haMT7Rw/6xZ1rsBt2423cwTrQVMQyACrEkianpuujubKltN99qRoFAxhQcnYE2KlYKw7lRcExq6mDSYAyk5xJZ1ZFdLj6MAryZroQit/0g5eyhoNEKwWbi8px5j71pRTf7yjN+deMGQKwbGl+3OgaL1UZ5fCjypbVL60kpIBxLZwIJ7p3jJ+q9pbq9zSdzshPYor5lxyUfXqaso/0/91ayNoBzg4hQGh618PhFI6RMGjwkzhB9xk74iweJ9HQyIHf8yx2RCSI22JuCMitPMWSGvOszhbNx3AEDLuiiAOHg391mprEtKZguOIr9LrJwem/YmcHbwyz5YAbZmiseKPkllfC7dafFfCFEkj6R2oegIsZo0pEKYisAXBqT0g+6/jGwuhlZcBo0f7UIZm88iA3MrJCjlXEgV5OcQdoWj+hq0lKEdnhtCKr03AIfukN6+4vjjarZeW1bs0swq0l3XFf5RHa11otshMS4mpewshB9iO9MuKWpRxuxeng4PlKZ/zuBqmPeUrjJ9454oK35Pq+dghfemt7AUpBH/KycDNIZgfdEWUZrRKBGnc519C+RTqxyt5hWL18nJk4LvSd3QKlJ1iyJxClhhb/NWEzPqNdyA5cxen+2T9bd/EqJ2KzRv5/BPVwTQkHH9W/TZElFyvFfOFIW2+03RKbVGw72Mr/0xKZ+awAnEfoU+SL/2Gj2m6PHkqFX2sOCi/tN9EA4xgdswEwYJKoZIhvcNAQkVMQYEBAEAAAAwXQYJKwYBBAGCNxEBMVAeTgBNAGkAYwByAG8AcwBvAGYAdAAgAFMAdAByAG8AbgBnACAAQwByAHkAcAB0AG8AZwByAGEAcABoAGkAYwAgAFAAcgBvAHYAaQBkAGUAcjBlBgkqhkiG9w0BCRQxWB5WAFAAdgBrAFQAbQBwADoANABjAGUANgAwADQAZABhAC0AMAA2ADgAMQAtADQANAAxADUALQBhADIAYwBhAC0ANQA3ADcAMwAwADgAZQA2AGQAOQBhAGMwggIOBgkqhkiG9w0BBwGgggH/BIIB+zCCAfcwggHzBgsqhkiG9w0BDAoBA6CCAcswggHHBgoqhkiG9w0BCRYBoIIBtwSCAbMwggGvMIIBXaADAgECAhAdka3aTQsIsUphgIXGUmeRMAkGBSsOAwIdBQAwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3kwHhcNMTYwMTAxMDcwMDAwWhcNMTgwMTAxMDcwMDAwWjASMRAwDgYDVQQDEwdub2Rlc2RrMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC5fhcxbJHxxBEIDzVOMc56s04U6k4GPY7yMR1m+rBGVRiAyV4RjY6U936dqXHCVD36ps2Q0Z+OeEgyCInkIyVeB1EwXcToOcyeS2YcUb0vRWZDouC3tuFdHwiK1Ed5iW/LksmXDotyV7kpqzaPhOFiMtBuMEwNJcPge9k17hRgRQIDAQABo0swSTBHBgNVHQEEQDA+gBAS5AktBh0dTwCNYSHcFmRjoRgwFjEUMBIGA1UEAxMLUm9vdCBBZ2VuY3mCEAY3bACqAGSKEc+41KpcNfQwCQYFKw4DAh0FAANBAHl2M97QbpzdnwO5HoRBsiEExOcLTNg+GKCr7HUsbzfvrUivw+JLL7qjHAIc5phnK+F5bQ8HKe0L9YXBSKl+fvwxFTATBgkqhkiG9w0BCRUxBgQEAQAAADA7MB8wBwYFKw4DAhoEFGVtyGMqiBd32fGpzlGZQoRM6UQwBBTI0YHFFqTS4Go8CoLgswn29EiuUQICB9A=',) response = self.mgmt_batch_client.certificate.update(resource_group.name, batch_account.name, certificate, parameters) self.assertIsInstance(response, models.Certificate) self.mgmt_batch_client.certificate.cancel_deletion( resource_group.name, batch_account.name, certificate) response = self.mgmt_batch_client.certificate.delete(resource_group.name, batch_account.name, certificate) self.assertIsNone(response.result()) @ResourceGroupPreparer(location=AZURE_LOCATION) @SimpleBatchPreparer(location=AZURE_LOCATION) def test_mgmt_batch_pools(self, resource_group, location, batch_account): paas_pool = "test_paas_pool" parameters = models.Pool( display_name="test_pool", vm_size='small', deployment_configuration=models.DeploymentConfiguration( cloud_service_configuration=models.CloudServiceConfiguration(os_family='5') ), start_task=models.StartTask( command_line="cmd.exe /c \"echo hello world\"", resource_files=[models.ResourceFile(http_url='https://blobsource.com', file_path='filename.txt')], environment_settings=[models.EnvironmentSetting('ENV_VAR', 'env_value')], user_identity=models.UserIdentity( auto_user=models.AutoUserSpecification( elevation_level=models.ElevationLevel.admin ) ) ), user_accounts=[models.UserAccount('UserName', 'p@55wOrd')], scale_settings=models.ScaleSettings( fixed_scale=models.FixedScaleSettings( target_dedicated_nodes=0, target_low_priority_nodes=0 ) ) ) response = 
self.mgmt_batch_client.pool.create( resource_group.name, batch_account.name, paas_pool, parameters) self.assertIsInstance(response.result(), models.Pool) iaas_pool = "test_iaas_pool" parameters = models.Pool( display_name="test_pool", vm_size='Standard_A1', deployment_configuration=models.DeploymentConfiguration( virtual_machine_configuration=models.VirtualMachineConfiguration( image_reference=models.ImageReference( publisher='MicrosoftWindowsServer', offer='WindowsServer', sku='2016-Datacenter-smalldisk' ), node_agent_sku_id='batch.node.windows amd64', windows_configuration=models.WindowsConfiguration(True) ) ), scale_settings=models.ScaleSettings( fixed_scale=models.FixedScaleSettings( target_dedicated_nodes=0, target_low_priority_nodes=0 ) ) ) response = self.mgmt_batch_client.pool.create( resource_group.name, batch_account.name, iaas_pool, parameters) self.assertIsInstance(response.result(), models.Pool) pools = self.mgmt_batch_client.pool.list_by_batch_account(resource_group.name, batch_account.name) self.assertEqual(len(list(pools)), 2) parameters = models.Pool( scale_settings=models.ScaleSettings( auto_scale=models.AutoScaleSettings( formula='$TargetDedicatedNodes=1' ) ) ) response = self.mgmt_batch_client.pool.update( resource_group.name, batch_account.name, iaas_pool, parameters) self.assertIsInstance(response, models.Pool) pool = self.mgmt_batch_client.pool.get( resource_group.name, batch_account.name, iaas_pool) self.assertIsInstance(pool, models.Pool) self.assertEqual(pool.vm_size, 'STANDARD_A1'), self.assertIsNone(pool.display_name), self.assertEqual(pool.allocation_state, models.AllocationState.resizing) self.assertEqual( pool.deployment_configuration.virtual_machine_configuration.node_agent_sku_id, 'batch.node.windows amd64') with self.assertRaises(CloudError): self.mgmt_batch_client.pool.stop_resize(resource_group.name, batch_account.name, iaas_pool) if self.is_live: time.sleep(300) response = self.mgmt_batch_client.pool.disable_auto_scale( resource_group.name, batch_account.name, iaas_pool) self.assertIsInstance(response, models.Pool) response = self.mgmt_batch_client.pool.delete( resource_group.name, batch_account.name, iaas_pool) self.assertIsNone(response.result())
true
true
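The test record above exercises the Azure Batch account lifecycle through azure.mgmt.batch. As a quick orientation only, here is a minimal sketch of the create / get-keys / delete flow those tests drive; it assumes an already-constructed BatchManagementClient is passed in, and the resource-group and account names are placeholders, not values from the tests.

from azure.mgmt.batch import models


def exercise_batch_account(client, resource_group, account_name, location):
    """Create, inspect, and delete a Batch account, mirroring the test flow above."""
    params = models.BatchAccountCreateParameters(location=location)
    client.batch_account.create(resource_group, account_name, params).result()  # long-running poller
    keys = client.batch_account.get_keys(resource_group, account_name)          # BatchAccountKeys
    client.batch_account.delete(resource_group, account_name).result()          # delete is also a poller
    return keys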
f7011e2f58481d64d3b47d08cd59dda7a5d57e00
1,021
py
Python
tests/kit/timestamp.py
untzag/WrightTools
05480d2f91ceeca422d9e5ac381fce1840207cb0
[ "MIT" ]
12
2017-07-11T15:58:12.000Z
2021-05-10T20:33:26.000Z
tests/kit/timestamp.py
untzag/WrightTools
05480d2f91ceeca422d9e5ac381fce1840207cb0
[ "MIT" ]
808
2015-04-12T00:36:08.000Z
2022-03-27T21:06:06.000Z
tests/kit/timestamp.py
untzag/WrightTools
05480d2f91ceeca422d9e5ac381fce1840207cb0
[ "MIT" ]
9
2017-07-22T18:54:23.000Z
2022-02-17T20:31:05.000Z
"""Test timestamp.""" # --- import ------------------------------------------------------------------------------------- import WrightTools as wt # --- test --------------------------------------------------------------------------------------- def test_now(): wt.kit.TimeStamp() # exception will be raised upon failure def test_utc(): wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17Z") # exception will be raised upon failure def test_date(): ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17-6") assert len(ts.date) == 10 def test_hms(): ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:33:44-6") assert len(ts.hms) == 8 def test_human(): ts = wt.kit.TimeStamp() assert len(ts.human) == 19 def test_RFC3339(): ts = wt.kit.TimeStamp() assert ts.RFC3339 assert wt.kit.timestamp_from_RFC3339(ts.RFC3339) == ts def test_RFC5322(): ts = wt.kit.TimeStamp() assert ts.RFC5322 def test_path(): ts = wt.kit.TimeStamp() assert ts.path
20.42
98
0.540646
import WrightTools as wt


def test_now():
    wt.kit.TimeStamp()


def test_utc():
    wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17Z")


def test_date():
    ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:09:17-6")
    assert len(ts.date) == 10


def test_hms():
    ts = wt.kit.timestamp_from_RFC3339("2017-11-13 16:33:44-6")
    assert len(ts.hms) == 8


def test_human():
    ts = wt.kit.TimeStamp()
    assert len(ts.human) == 19


def test_RFC3339():
    ts = wt.kit.TimeStamp()
    assert ts.RFC3339
    assert wt.kit.timestamp_from_RFC3339(ts.RFC3339) == ts


def test_RFC5322():
    ts = wt.kit.TimeStamp()
    assert ts.RFC5322


def test_path():
    ts = wt.kit.TimeStamp()
    assert ts.path
true
true
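Read together, the assertions above pin down a small TimeStamp API (RFC3339, RFC5322, human, date, hms, path, plus a round-trip through timestamp_from_RFC3339). A brief usage sketch inferred from those tests, assuming WrightTools is installed:

import WrightTools as wt

ts = wt.kit.TimeStamp()                                 # capture "now"; raises on failure
print(ts.RFC3339)                                       # machine-readable form used for round-tripping
print(ts.RFC5322, ts.human, ts.date, ts.hms, ts.path)   # other representations the tests assert

round_tripped = wt.kit.timestamp_from_RFC3339(ts.RFC3339)
assert round_tripped == ts                              # equality after round-trip, as in test_RFC3339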
f7011e34147f8bf8224865d41dddee78c3e658d3
4,960
py
Python
hack/release/wizard/poll-mirrors.py
LaudateCorpus1/solr-operator
86107e4c3d2945a15d7f08bbb081eb1418bd6ead
[ "Apache-2.0" ]
103
2019-06-14T20:36:28.000Z
2021-01-12T23:47:54.000Z
hack/release/wizard/poll-mirrors.py
Enterprism/solr-operator
e06464994dbb963db564f65a7d9bec8d0a7fca30
[ "Apache-2.0" ]
127
2019-06-21T16:04:02.000Z
2021-01-12T15:41:42.000Z
hack/release/wizard/poll-mirrors.py
Enterprism/solr-operator
e06464994dbb963db564f65a7d9bec8d0a7fca30
[ "Apache-2.0" ]
44
2019-06-19T23:12:46.000Z
2021-01-06T12:11:41.000Z
#!/usr/bin/env python3
#
# vim: softtabstop=2 shiftwidth=2 expandtab
#
# Python port of poll-mirrors.pl
#
# This script is designed to poll download sites after posting a release
# and print out notice as each becomes available. The RM can use this
# script to delay the release announcement until the release can be
# downloaded.
#
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import argparse
import datetime
import ftplib
import re
import sys
import time

from urllib.parse import urlparse
from multiprocessing import Pool

import http.client as http


def p(s):
  sys.stdout.write(s)
  sys.stdout.flush()


def mirror_contains_file(url):
  url = urlparse(url)
  if url.scheme == 'https':
    return https_file_exists(url)
  elif url.scheme == 'http':
    return http_file_exists(url)
  elif url.scheme == 'ftp':
    return ftp_file_exists(url)


def http_file_exists(url):
  exists = False
  try:
    conn = http.HTTPConnection(url.netloc)
    conn.request('HEAD', url.path)
    response = conn.getresponse()
    exists = response.status == 200
  except:
    pass
  return exists


def https_file_exists(url):
  exists = False
  try:
    conn = http.HTTPSConnection(url.netloc)
    conn.request('HEAD', url.path)
    response = conn.getresponse()
    exists = response.status == 200
  except:
    pass
  return exists


def ftp_file_exists(url):
  listing = []
  try:
    conn = ftplib.FTP(url.netloc)
    conn.login()
    listing = conn.nlst(url.path)
    conn.quit()
  except Exception as e:
    pass
  return len(listing) > 0


def check_mirror(url):
  if mirror_contains_file(url):
    p('.')
    return None
  else:
    # p('\nFAIL: ' + url + '\n')
    p('X')
    return url


if __name__ == '__main__':
  desc = 'Periodically checks that all Solr mirrors contain either a copy of a release or a specified path'
  parser = argparse.ArgumentParser(description=desc)
  parser.add_argument('-version', '-v', help='Solr Operator version to check')
  parser.add_argument('-path', '-p', help='instead of a versioned release, check for some/explicit/path')
  parser.add_argument('-interval', '-i', help='seconds to wait before re-querying mirrors', type=int, default=300)
  parser.add_argument('-once', '-o', help='run only once', action='store_true', default=False)
  args = parser.parse_args()

  if (args.version is None and args.path is None) \
      or (args.version is not None and args.path is not None):
    p('You must specify either -version or -path but not both!\n')
    sys.exit(1)

  try:
    conn = http.HTTPSConnection('www.apache.org')
    conn.request('GET', '/mirrors/')
    response = conn.getresponse()
    html = response.read()
  except Exception as e:
    p('Unable to fetch the Apache mirrors list!\n')
    sys.exit(1)

  mirror_path = args.path if args.path is not None else 'solr/solr-operator/{}/solr-operator-{}.tgz'.format(args.version, args.version)

  pending_mirrors = []
  for match in re.finditer('<TR>(.*?)</TR>', str(html), re.MULTILINE | re.IGNORECASE | re.DOTALL):
    row = match.group(1)
    if not '<TD>ok</TD>' in row:
      # skip bad mirrors
      continue

    match = re.search('<A\s+HREF\s*=\s*"([^"]+)"\s*>', row, re.MULTILINE | re.IGNORECASE)
    if match:
      pending_mirrors.append(match.group(1) + mirror_path)

  total_mirrors = len(pending_mirrors)
  label = args.version if args.version is not None else args.path

  while True:
    p('\n{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now()))
    p('\nPolling {} Apache Mirrors'.format(len(pending_mirrors)))
    p('...\n')

    start = time.time()
    with Pool(processes=5) as pool:
      pending_mirrors = list(filter(lambda x: x is not None, pool.map(check_mirror, pending_mirrors)))
    stop = time.time()
    remaining = args.interval - (stop - start)

    available_mirrors = total_mirrors - len(pending_mirrors)
    p('\n{} is downloadable from {}/{} Apache Mirrors ({:.2f}%)\n'
      .format(label, available_mirrors, total_mirrors,
              available_mirrors * 100 / (1 if total_mirrors == 0 else total_mirrors)))

    if len(pending_mirrors) == 0 or args.once == True:
      break

    if remaining > 0:
      p('Sleeping for {:d} seconds...\n'.format(int(remaining + 0.5)))
      time.sleep(remaining)
29.879518
135
0.688306
import argparse
import datetime
import ftplib
import re
import sys
import time

from urllib.parse import urlparse
from multiprocessing import Pool

import http.client as http


def p(s):
  sys.stdout.write(s)
  sys.stdout.flush()


def mirror_contains_file(url):
  url = urlparse(url)
  if url.scheme == 'https':
    return https_file_exists(url)
  elif url.scheme == 'http':
    return http_file_exists(url)
  elif url.scheme == 'ftp':
    return ftp_file_exists(url)


def http_file_exists(url):
  exists = False
  try:
    conn = http.HTTPConnection(url.netloc)
    conn.request('HEAD', url.path)
    response = conn.getresponse()
    exists = response.status == 200
  except:
    pass
  return exists


def https_file_exists(url):
  exists = False
  try:
    conn = http.HTTPSConnection(url.netloc)
    conn.request('HEAD', url.path)
    response = conn.getresponse()
    exists = response.status == 200
  except:
    pass
  return exists


def ftp_file_exists(url):
  listing = []
  try:
    conn = ftplib.FTP(url.netloc)
    conn.login()
    listing = conn.nlst(url.path)
    conn.quit()
  except Exception as e:
    pass
  return len(listing) > 0


def check_mirror(url):
  if mirror_contains_file(url):
    p('.')
    return None
  else:
    p('X')
    return url


if __name__ == '__main__':
  desc = 'Periodically checks that all Solr mirrors contain either a copy of a release or a specified path'
  parser = argparse.ArgumentParser(description=desc)
  parser.add_argument('-version', '-v', help='Solr Operator version to check')
  parser.add_argument('-path', '-p', help='instead of a versioned release, check for some/explicit/path')
  parser.add_argument('-interval', '-i', help='seconds to wait before re-querying mirrors', type=int, default=300)
  parser.add_argument('-once', '-o', help='run only once', action='store_true', default=False)
  args = parser.parse_args()

  if (args.version is None and args.path is None) \
      or (args.version is not None and args.path is not None):
    p('You must specify either -version or -path but not both!\n')
    sys.exit(1)

  try:
    conn = http.HTTPSConnection('www.apache.org')
    conn.request('GET', '/mirrors/')
    response = conn.getresponse()
    html = response.read()
  except Exception as e:
    p('Unable to fetch the Apache mirrors list!\n')
    sys.exit(1)

  mirror_path = args.path if args.path is not None else 'solr/solr-operator/{}/solr-operator-{}.tgz'.format(args.version, args.version)

  pending_mirrors = []
  for match in re.finditer('<TR>(.*?)</TR>', str(html), re.MULTILINE | re.IGNORECASE | re.DOTALL):
    row = match.group(1)
    if not '<TD>ok</TD>' in row:
      continue

    match = re.search('<A\s+HREF\s*=\s*"([^"]+)"\s*>', row, re.MULTILINE | re.IGNORECASE)
    if match:
      pending_mirrors.append(match.group(1) + mirror_path)

  total_mirrors = len(pending_mirrors)
  label = args.version if args.version is not None else args.path

  while True:
    p('\n{:%Y-%m-%d %H:%M:%S}'.format(datetime.datetime.now()))
    p('\nPolling {} Apache Mirrors'.format(len(pending_mirrors)))
    p('...\n')

    start = time.time()
    with Pool(processes=5) as pool:
      pending_mirrors = list(filter(lambda x: x is not None, pool.map(check_mirror, pending_mirrors)))
    stop = time.time()
    remaining = args.interval - (stop - start)

    available_mirrors = total_mirrors - len(pending_mirrors)
    p('\n{} is downloadable from {}/{} Apache Mirrors ({:.2f}%)\n'
      .format(label, available_mirrors, total_mirrors,
              available_mirrors * 100 / (1 if total_mirrors == 0 else total_mirrors)))

    if len(pending_mirrors) == 0 or args.once == True:
      break

    if remaining > 0:
      p('Sleeping for {:d} seconds...\n'.format(int(remaining + 0.5)))
      time.sleep(remaining)
true
true
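The core trick in poll-mirrors.py is probing each mirror with an HTTP(S) HEAD request and treating a 200 response as "file present". A stripped-down, standalone illustration of that check follows; it is not part of the script, and the URL at the bottom is only a placeholder.

import http.client
from urllib.parse import urlparse


def url_exists(raw_url, timeout=10):
    """Return True if a HEAD request for raw_url answers with status 200."""
    url = urlparse(raw_url)
    try:
        conn = http.client.HTTPSConnection(url.netloc, timeout=timeout)
        conn.request('HEAD', url.path)
        return conn.getresponse().status == 200
    except OSError:
        return False


print(url_exists('https://downloads.apache.org/solr/KEYS'))  # placeholder URL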
f7011e8857c6587e833bd628c80b1cdb7915be5c
1,324
py
Python
yb_sysprocs_column_stats.py
eloemosynator/YbEasyCli
b35ebe03da07898cfa06ff687cba29cd83268c31
[ "MIT" ]
null
null
null
yb_sysprocs_column_stats.py
eloemosynator/YbEasyCli
b35ebe03da07898cfa06ff687cba29cd83268c31
[ "MIT" ]
4
2020-06-03T18:11:29.000Z
2022-03-07T20:41:16.000Z
yb_sysprocs_column_stats.py
eloemosynator/YbEasyCli
b35ebe03da07898cfa06ff687cba29cd83268c31
[ "MIT" ]
2
2020-05-27T23:43:03.000Z
2022-03-03T23:16:15.000Z
#!/usr/bin/env python3
"""
USAGE:
      yb_sysprocs_column_stats.py [options]

PURPOSE:
      Table column metadata including estimates from statistics.

OPTIONS:
      See the command line help message for all options.
      (yb_sysprocs_column_stats.py --help)

Output:
      The report as a formatted table, pipe separated value rows, or inserted into a database table.
"""
from yb_sp_report_util import SPReportUtil


class report_column_stats(SPReportUtil):
    """Issue the ybsql commands used to create the column distribution report."""
    config = {
        'description': 'Table column metadata including estimates from statistics.'
        , 'report_sp_location': 'sysviews'
        , 'report_default_order': 'table_schema|table_name'
        , 'required_args_single': ['database']
        , 'optional_args_multi': ['schema', 'table']
        , 'db_filter_args': {'database':'db_name', 'schema':'table_schema', 'table':'table_name'}
        , 'usage_example_extra': {'cmd_line_args': "--database acme --schema_in dev --table_like 'cust%'" } }

    def execute(self):
        return self.build({
            '_db_name': self.args_handler.args.database
            , '_yb_util_filter' : self.db_filter_sql() })


def main():
    print(report_column_stats().execute())
    exit(0)


if __name__ == "__main__":
    main()
33.948718
109
0.674471
from yb_sp_report_util import SPReportUtil


class report_column_stats(SPReportUtil):
    config = {
        'description': 'Table column metadata including estimates from statistics.'
        , 'report_sp_location': 'sysviews'
        , 'report_default_order': 'table_schema|table_name'
        , 'required_args_single': ['database']
        , 'optional_args_multi': ['schema', 'table']
        , 'db_filter_args': {'database':'db_name', 'schema':'table_schema', 'table':'table_name'}
        , 'usage_example_extra': {'cmd_line_args': "--database acme --schema_in dev --table_like 'cust%'" } }

    def execute(self):
        return self.build({
            '_db_name': self.args_handler.args.database
            , '_yb_util_filter' : self.db_filter_sql() })


def main():
    print(report_column_stats().execute())
    exit(0)


if __name__ == "__main__":
    main()
true
true
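The 'usage_example_extra' entry in the config above documents a typical invocation of this report. Purely for illustration, a hypothetical wrapper (run_report is not part of YbEasyCli) that shells out with exactly those example arguments:

import subprocess


def run_report(extra_args):
    """Run the report script with the given CLI arguments and return its stdout."""
    cmd = ["./yb_sysprocs_column_stats.py", *extra_args]  # hypothetical path to the script
    return subprocess.run(cmd, capture_output=True, text=True, check=True).stdout


print(run_report(["--database", "acme", "--schema_in", "dev", "--table_like", "cust%"]))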
f7011fcabf6ac4b97fa6842e6d6194f74077db62
12,668
py
Python
sdk/python/pulumi_azure_nextgen/network/v20171101/virtual_network.py
test-wiz-sec/pulumi-azure-nextgen
20a695af0d020b34b0f1c336e1b69702755174cc
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_nextgen/network/v20171101/virtual_network.py
test-wiz-sec/pulumi-azure-nextgen
20a695af0d020b34b0f1c336e1b69702755174cc
[ "Apache-2.0" ]
null
null
null
sdk/python/pulumi_azure_nextgen/network/v20171101/virtual_network.py
test-wiz-sec/pulumi-azure-nextgen
20a695af0d020b34b0f1c336e1b69702755174cc
[ "Apache-2.0" ]
null
null
null
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi SDK Generator. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._inputs import * __all__ = ['VirtualNetwork'] class VirtualNetwork(pulumi.CustomResource): def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, address_space: Optional[pulumi.Input[pulumi.InputType['AddressSpaceArgs']]] = None, dhcp_options: Optional[pulumi.Input[pulumi.InputType['DhcpOptionsArgs']]] = None, enable_ddos_protection: Optional[pulumi.Input[bool]] = None, enable_vm_protection: Optional[pulumi.Input[bool]] = None, etag: Optional[pulumi.Input[str]] = None, id: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, provisioning_state: Optional[pulumi.Input[str]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, resource_guid: Optional[pulumi.Input[str]] = None, subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, virtual_network_name: Optional[pulumi.Input[str]] = None, virtual_network_peerings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkPeeringArgs']]]]] = None, __props__=None, __name__=None, __opts__=None): """ Virtual Network resource. :param str resource_name: The name of the resource. :param pulumi.ResourceOptions opts: Options for the resource. :param pulumi.Input[pulumi.InputType['AddressSpaceArgs']] address_space: The AddressSpace that contains an array of IP address ranges that can be used by subnets. :param pulumi.Input[pulumi.InputType['DhcpOptionsArgs']] dhcp_options: The dhcpOptions that contains an array of DNS servers available to VMs deployed in the virtual network. :param pulumi.Input[bool] enable_ddos_protection: Indicates if DDoS protection is enabled for all the protected resources in a Virtual Network. :param pulumi.Input[bool] enable_vm_protection: Indicates if Vm protection is enabled for all the subnets in a Virtual Network. :param pulumi.Input[str] etag: Gets a unique read-only string that changes whenever the resource is updated. :param pulumi.Input[str] id: Resource ID. :param pulumi.Input[str] location: Resource location. :param pulumi.Input[str] provisioning_state: The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :param pulumi.Input[str] resource_group_name: The name of the resource group. :param pulumi.Input[str] resource_guid: The resourceGuid property of the Virtual Network resource. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]] subnets: A list of subnets in a Virtual Network. :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags. :param pulumi.Input[str] virtual_network_name: The name of the virtual network. :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkPeeringArgs']]]] virtual_network_peerings: A list of peerings in a Virtual Network. 
""" if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['address_space'] = address_space __props__['dhcp_options'] = dhcp_options __props__['enable_ddos_protection'] = enable_ddos_protection __props__['enable_vm_protection'] = enable_vm_protection __props__['etag'] = etag __props__['id'] = id __props__['location'] = location __props__['provisioning_state'] = provisioning_state if resource_group_name is None: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['resource_guid'] = resource_guid __props__['subnets'] = subnets __props__['tags'] = tags if virtual_network_name is None: raise TypeError("Missing required property 'virtual_network_name'") __props__['virtual_network_name'] = virtual_network_name __props__['virtual_network_peerings'] = virtual_network_peerings __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150615:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160330:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20161201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170301:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20171001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VirtualNetwork"), 
pulumi.Alias(type_="azure-nextgen:network/v20191101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VirtualNetwork")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(VirtualNetwork, __self__).__init__( 'azure-nextgen:network/v20171101:VirtualNetwork', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetwork': """ Get an existing VirtualNetwork resource's state with the given name, id, and optional extra properties used to qualify the lookup. :param str resource_name: The unique name of the resulting resource. :param pulumi.Input[str] id: The unique provider ID of the resource to lookup. :param pulumi.ResourceOptions opts: Options for the resource. """ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return VirtualNetwork(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="addressSpace") def address_space(self) -> pulumi.Output[Optional['outputs.AddressSpaceResponse']]: """ The AddressSpace that contains an array of IP address ranges that can be used by subnets. """ return pulumi.get(self, "address_space") @property @pulumi.getter(name="dhcpOptions") def dhcp_options(self) -> pulumi.Output[Optional['outputs.DhcpOptionsResponse']]: """ The dhcpOptions that contains an array of DNS servers available to VMs deployed in the virtual network. """ return pulumi.get(self, "dhcp_options") @property @pulumi.getter(name="enableDdosProtection") def enable_ddos_protection(self) -> pulumi.Output[Optional[bool]]: """ Indicates if DDoS protection is enabled for all the protected resources in a Virtual Network. """ return pulumi.get(self, "enable_ddos_protection") @property @pulumi.getter(name="enableVmProtection") def enable_vm_protection(self) -> pulumi.Output[Optional[bool]]: """ Indicates if Vm protection is enabled for all the subnets in a Virtual Network. """ return pulumi.get(self, "enable_vm_protection") @property @pulumi.getter def etag(self) -> pulumi.Output[Optional[str]]: """ Gets a unique read-only string that changes whenever the resource is updated. """ return pulumi.get(self, "etag") @property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: """ Resource location. """ return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: """ Resource name. """ return pulumi.get(self, "name") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> pulumi.Output[Optional[str]]: """ The provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. """ return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="resourceGuid") def resource_guid(self) -> pulumi.Output[Optional[str]]: """ The resourceGuid property of the Virtual Network resource. """ return pulumi.get(self, "resource_guid") @property @pulumi.getter def subnets(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetResponse']]]: """ A list of subnets in a Virtual Network. 
""" return pulumi.get(self, "subnets") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: """ Resource tags. """ return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: """ Resource type. """ return pulumi.get(self, "type") @property @pulumi.getter(name="virtualNetworkPeerings") def virtual_network_peerings(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualNetworkPeeringResponse']]]: """ A list of peerings in a Virtual Network. """ return pulumi.get(self, "virtual_network_peerings") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
55.078261
2,437
0.68748
import warnings import pulumi import pulumi.runtime from typing import Any, Mapping, Optional, Sequence, Union from ... import _utilities, _tables from . import outputs from ._inputs import * __all__ = ['VirtualNetwork'] class VirtualNetwork(pulumi.CustomResource): def __init__(__self__, resource_name: str, opts: Optional[pulumi.ResourceOptions] = None, address_space: Optional[pulumi.Input[pulumi.InputType['AddressSpaceArgs']]] = None, dhcp_options: Optional[pulumi.Input[pulumi.InputType['DhcpOptionsArgs']]] = None, enable_ddos_protection: Optional[pulumi.Input[bool]] = None, enable_vm_protection: Optional[pulumi.Input[bool]] = None, etag: Optional[pulumi.Input[str]] = None, id: Optional[pulumi.Input[str]] = None, location: Optional[pulumi.Input[str]] = None, provisioning_state: Optional[pulumi.Input[str]] = None, resource_group_name: Optional[pulumi.Input[str]] = None, resource_guid: Optional[pulumi.Input[str]] = None, subnets: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SubnetArgs']]]]] = None, tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None, virtual_network_name: Optional[pulumi.Input[str]] = None, virtual_network_peerings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VirtualNetworkPeeringArgs']]]]] = None, __props__=None, __name__=None, __opts__=None): if __name__ is not None: warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning) resource_name = __name__ if __opts__ is not None: warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning) opts = __opts__ if opts is None: opts = pulumi.ResourceOptions() if not isinstance(opts, pulumi.ResourceOptions): raise TypeError('Expected resource options to be a ResourceOptions instance') if opts.version is None: opts.version = _utilities.get_version() if opts.id is None: if __props__ is not None: raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource') __props__ = dict() __props__['address_space'] = address_space __props__['dhcp_options'] = dhcp_options __props__['enable_ddos_protection'] = enable_ddos_protection __props__['enable_vm_protection'] = enable_vm_protection __props__['etag'] = etag __props__['id'] = id __props__['location'] = location __props__['provisioning_state'] = provisioning_state if resource_group_name is None: raise TypeError("Missing required property 'resource_group_name'") __props__['resource_group_name'] = resource_group_name __props__['resource_guid'] = resource_guid __props__['subnets'] = subnets __props__['tags'] = tags if virtual_network_name is None: raise TypeError("Missing required property 'virtual_network_name'") __props__['virtual_network_name'] = virtual_network_name __props__['virtual_network_peerings'] = virtual_network_peerings __props__['name'] = None __props__['type'] = None alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20150615:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160330:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20160901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20161201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170301:VirtualNetwork"), 
pulumi.Alias(type_="azure-nextgen:network/v20170601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20170901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20171001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20180801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181001:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20181201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190701:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190801:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20190901:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20191101:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20191201:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200301:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200401:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200501:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200601:VirtualNetwork"), pulumi.Alias(type_="azure-nextgen:network/v20200701:VirtualNetwork")]) opts = pulumi.ResourceOptions.merge(opts, alias_opts) super(VirtualNetwork, __self__).__init__( 'azure-nextgen:network/v20171101:VirtualNetwork', resource_name, __props__, opts) @staticmethod def get(resource_name: str, id: pulumi.Input[str], opts: Optional[pulumi.ResourceOptions] = None) -> 'VirtualNetwork': opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id)) __props__ = dict() return VirtualNetwork(resource_name, opts=opts, __props__=__props__) @property @pulumi.getter(name="addressSpace") def address_space(self) -> pulumi.Output[Optional['outputs.AddressSpaceResponse']]: return pulumi.get(self, "address_space") @property @pulumi.getter(name="dhcpOptions") def dhcp_options(self) -> pulumi.Output[Optional['outputs.DhcpOptionsResponse']]: return pulumi.get(self, "dhcp_options") @property @pulumi.getter(name="enableDdosProtection") def enable_ddos_protection(self) -> pulumi.Output[Optional[bool]]: return pulumi.get(self, "enable_ddos_protection") @property @pulumi.getter(name="enableVmProtection") def enable_vm_protection(self) -> pulumi.Output[Optional[bool]]: return pulumi.get(self, "enable_vm_protection") @property @pulumi.getter def etag(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "etag") @property @pulumi.getter def location(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "location") @property @pulumi.getter def name(self) -> pulumi.Output[str]: return pulumi.get(self, "name") @property @pulumi.getter(name="provisioningState") def provisioning_state(self) -> pulumi.Output[Optional[str]]: return pulumi.get(self, "provisioning_state") @property @pulumi.getter(name="resourceGuid") def resource_guid(self) -> pulumi.Output[Optional[str]]: return 
pulumi.get(self, "resource_guid") @property @pulumi.getter def subnets(self) -> pulumi.Output[Optional[Sequence['outputs.SubnetResponse']]]: return pulumi.get(self, "subnets") @property @pulumi.getter def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]: return pulumi.get(self, "tags") @property @pulumi.getter def type(self) -> pulumi.Output[str]: return pulumi.get(self, "type") @property @pulumi.getter(name="virtualNetworkPeerings") def virtual_network_peerings(self) -> pulumi.Output[Optional[Sequence['outputs.VirtualNetworkPeeringResponse']]]: return pulumi.get(self, "virtual_network_peerings") def translate_output_property(self, prop): return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop def translate_input_property(self, prop): return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
true
true
f70120e6a200f8a97e0c72c99bfc18970159cb7d
18,984
py
Python
tests/units/cli/test_cli_utils.py
NiallRees/pipelinewise
47126592ada914e43014f601e237bdbe67f3c581
[ "Apache-2.0" ]
null
null
null
tests/units/cli/test_cli_utils.py
NiallRees/pipelinewise
47126592ada914e43014f601e237bdbe67f3c581
[ "Apache-2.0" ]
null
null
null
tests/units/cli/test_cli_utils.py
NiallRees/pipelinewise
47126592ada914e43014f601e237bdbe67f3c581
[ "Apache-2.0" ]
null
null
null
import os import re import pipelinewise.cli as cli import pytest VIRTUALENVS_DIR = './virtualenvs-dummy' # pylint: disable=no-self-use,fixme class TestUtils: """ Unit Tests for PipelineWise CLI utility functions """ def test_json_detectors(self): """Testing JSON detector functions""" assert cli.utils.is_json('{Invalid JSON}') is False assert cli.utils.is_json('[]') is True assert cli.utils.is_json('{}') is True assert cli.utils.is_json('{"prop": 123}') is True assert cli.utils.is_json('{"prop-str":"dummy-string","prop-int":123,"prop-bool":true}') is True assert cli.utils.is_json_file('./dummy-json') is False assert cli.utils.is_json_file('{}/resources/example.json'.format(os.path.dirname(__file__))) is True assert cli.utils.is_json_file('{}/resources/invalid.json'.format(os.path.dirname(__file__))) is False assert cli.utils.is_json_file('{}/resources'.format(os.path.dirname(__file__))) is False def test_json_loader(self): """Testing JSON loader functions""" # Loading JSON file that not exist should return None assert cli.utils.load_json('/invalid/location/to/json') is None # Loading JSON file with invalid JSON syntax should raise exception with pytest.raises(Exception): cli.utils.load_json('{}/resources/invalid.json'.format(os.path.dirname(__file__))) # Loading JSON should return python dict assert \ cli.utils.load_json('{}/resources/example.json'.format(os.path.dirname(__file__))) == \ { 'glossary': { 'title': 'example glossary', 'GlossDiv': { 'title': 'S', 'GlossList': { 'GlossEntry': { 'ID': 'SGML', 'SortAs': 'SGML', 'GlossTerm': 'Standard Generalized Markup Language', 'Acronym': 'SGML', 'Abbrev': 'ISO 8879:1986', 'GlossDef': { 'para': 'A meta-markup language, used to create markup languages such as DocBook.', 'GlossSeeAlso': ['GML', 'XML'] }, 'GlossSee': 'markup' } } } } } def test_json_saver(self): """Testing JSON save functions""" obj = {'foo': 'bar'} # Saving to invalid path should raise exception with pytest.raises(Exception): cli.utils.save_json(obj, '/invalid/path') # Saving and reloading should match cli.utils.save_json(obj, 'test-json.json') assert cli.utils.load_json('test-json.json') == obj # Delete output file, it's not required os.remove('test-json.json') def test_yaml_detectors(self): """Testing YAML detector functions""" assert cli.utils.is_yaml(""" foo: -bar""") is False assert cli.utils.is_yaml('id: 123') is True assert cli.utils.is_yaml(""" id: 123 details: - prop1: 123 - prop2: 456 """) is True assert cli.utils.is_yaml_file('./dummy-yaml') is False assert cli.utils.is_yaml_file('{}/resources/example.yml'.format(os.path.dirname(__file__))) is True assert cli.utils.is_yaml_file('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) is False assert cli.utils.is_yaml_file('{}/resources'.format(os.path.dirname(__file__))) is False def test_yaml_loader(self): """Testing YAML loader functions""" # Loading YAML file that not exist should return None assert cli.utils.load_yaml('/invalid/location/to/yaml') is None # Loading YAML file with invalid YAML syntax should raise exception with pytest.raises(Exception): cli.utils.load_yaml('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) # Loading YAML file with valid YAML syntax but invalid vault secret file should raise exception with pytest.raises(Exception): cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__)), 'invalid-secret-file-path') # Loading valid YAML file with no vault encryption assert \ cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__))) == \ 
['Apple', 'Orange', 'Strawberry', 'Mango'] # Loading valid YAML file with vault encrypted properties assert \ cli.utils.load_yaml( '{}/resources/example-with-vault.yml'.format(os.path.dirname(__file__)), '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))) == \ ['Apple', 'Orange', 'Strawberry', 'Mango', 'Vault Encrypted Secret Fruit'] def test_sample_file_path(self): """Sample files must be global config, tap, target YAML or README file""" for sample in cli.utils.get_sample_file_paths(): assert os.path.isfile(sample) is True assert \ re.match('.*config.yml$', sample) or \ re.match('.*(tap|target)_.*.yml.sample$', sample) or \ re.match('.*README.md$', sample) def test_extract_log_attributes(self): """Log files must match to certain pattern with embedded attributes in the file name""" assert \ cli.utils.extract_log_attributes('snowflake-fx-20190508_000038.singer.log.success') == \ { 'filename': 'snowflake-fx-20190508_000038.singer.log.success', 'target_id': 'snowflake', 'tap_id': 'fx', 'timestamp': '2019-05-08T00:00:38', 'sync_engine': 'singer', 'status': 'success' } assert \ cli.utils.extract_log_attributes('snowflake-fx-20190508_231238.fastsync.log.running') == \ { 'filename': 'snowflake-fx-20190508_231238.fastsync.log.running', 'target_id': 'snowflake', 'tap_id': 'fx', 'timestamp': '2019-05-08T23:12:38', 'sync_engine': 'fastsync', 'status': 'running' } assert \ cli.utils.extract_log_attributes('dummy-log-file.log') == \ { 'filename': 'dummy-log-file.log', 'target_id': 'unknown', 'tap_id': 'unknown', 'timestamp': '1970-01-01T00:00:00', 'sync_engine': 'unknown', 'status': 'unknown' } def test_fastsync_bin(self): """Fastsync binary paths must point to pipelinewise virtual environments""" # Giving tap and target types should be enough to generate full path to fastsync binaries assert \ cli.utils.get_fastsync_bin(VIRTUALENVS_DIR, 'mysql', 'snowflake') == \ '{}/pipelinewise/bin/mysql-to-snowflake'.format(VIRTUALENVS_DIR) def test_vault(self): """Test vault encrypt and decrypt functionalities""" # Encrypting with not existing file with secret should exit with pytest.raises(SystemExit) as pytest_wrapped_e: cli.utils.vault_encrypt('plain_test', 'not-existing-secret-file') assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 # Encrypted string should start with $ANSIBLE_VAULT; identifier encrypted_str = str( cli.utils.vault_encrypt('plain_text', '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__)))) assert encrypted_str.startswith("b'$ANSIBLE_VAULT;") is True # Formatted encrypted string should start with special token and should keep the original vault encrypted value formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str) assert formatted_encrypted_str.startswith('!vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str # Optional name argument should add the name to the output string as a key formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str, name='encrypted_plain_text') assert formatted_encrypted_str.startswith( 'encrypted_plain_text: !vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str def test_schema_loader(self): """Test JSON Schema loader functions""" # Loading JSON schema file that not exist should exit with pytest.raises(SystemExit) as pytest_wrapped_e: assert cli.utils.load_schema('/invalid/location/to/schema') is None assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 # Loading existing JSON schema should be loaded correctly 
tap_schema = cli.utils.load_json('{}/../../../pipelinewise/cli/schemas/tap.json'.format( os.path.dirname(__file__))) assert cli.utils.load_schema('tap') == tap_schema def test_json_validate(self): """Test JSON schema validator functions""" schema = cli.utils.load_schema('tap') # Valid instance should return None valid_tap = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__))) assert cli.utils.validate(valid_tap, schema) is None # Invalid instance should exit invalid_tap = cli.utils.load_yaml('{}/resources/tap-invalid.yml'.format(os.path.dirname(__file__))) with pytest.raises(SystemExit) as pytest_wrapped_e: cli.utils.validate(invalid_tap, schema) assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 def test_delete_keys(self): """Test dictionary functions""" # Delete single key with empty value assert cli.utils.delete_empty_keys({'foo': 'bar', 'foo2': None}) == {'foo': 'bar'} # Delete multiple keys with empty value assert cli.utils.delete_empty_keys({ 'foo': 'bar', 'foo2': None, 'foo3': None, 'foo4': 'bar4' }) == { 'foo': 'bar', 'foo4': 'bar4' } # Delete single key by name assert cli.utils.delete_keys_from_dict({'foo': 'bar', 'foo2': 'bar2'}, ['foo2']) == {'foo': 'bar'} # Delete single key by name assert cli.utils.delete_keys_from_dict({ 'foo': 'bar', 'foo2': 'bar2', 'foo3': None, 'foo4': 'bar4' }, ['foo2', 'foo4']) == { 'foo': 'bar', 'foo3': None } # Delete multiple keys from list of nested dictionaries assert cli.utils.delete_keys_from_dict( [{'foo': 'bar', 'foo2': 'bar2'}, {'foo3': {'nested_foo': 'nested_bar', 'nested_foo2': 'nested_bar2'}}], ['foo2', 'nested_foo']) == \ [{'foo': 'bar'}, {'foo3': {'nested_foo2': 'nested_bar2'}}] def test_silentremove(self): """Test removing functions""" # Deleting non existing file should not raise exception assert cli.utils.silentremove('this-file-not-exists.json') is None def test_tap_properties(self): """Test tap property getter functions""" tap_mysql = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__))) # Every tap should have catalog argument --properties or --catalog tap_catalog_argument = cli.utils.get_tap_property(tap_mysql, 'tap_catalog_argument') assert tap_catalog_argument in ['--catalog', '--properties'] # Every tap should have extra_config_keys defined in dict assert isinstance(cli.utils.get_tap_extra_config_keys(tap_mysql), dict) is True # MySQL stream_id should be formatted as {{schema_name}}-{{table_name}} assert cli.utils.get_tap_stream_id(tap_mysql, 'dummy_db', 'dummy_schema', 'dummy_table') == \ 'dummy_schema-dummy_table' # MySQL stream_name should be formatted as {{schema_name}}-{{table_name}} assert cli.utils.get_tap_stream_name(tap_mysql, 'dummy_db', 'dummy_schema', 'dummy_table') == 'dummy_schema-dummy_table' # MySQL stream_name should be formatted as {{schema_name}}-{{table_name}} assert cli.utils.get_tap_default_replication_method(tap_mysql) == 'LOG_BASED' # Get property value by tap type assert cli.utils.get_tap_property_by_tap_type('tap-mysql', 'default_replication_method') == 'LOG_BASED' # Kafka encoding and parameterised local_store_dir should be added as default extra config keys tap_kafka = cli.utils.load_yaml('{}/resources/tap-valid-kafka.yml'.format(os.path.dirname(__file__))) assert cli.utils.get_tap_extra_config_keys(tap_kafka, temp_dir='/my/temp/dir') == { 'local_store_dir': '/my/temp/dir', 'encoding': 'utf-8' } # Snwoflake tables list should be added to tap_config_extras tap_snowflake = 
cli.utils.load_yaml('{}/resources/tap-valid-snowflake.yml'.format(os.path.dirname(__file__))) assert cli.utils.get_tap_extra_config_keys(tap_snowflake) == { 'tables': 'SCHEMA_1.TABLE_ONE,SCHEMA_1.TABLE_TWO' } def test_get_tap_target_names(self): """Test get tap and target yamls""" expected_tap_names = {'tap_test.yml', 'tap_2test.yml', 'tap_valid.yaml'} expected_target_names = {'target_test.yml'} tap_names, target_names = cli.utils.get_tap_target_names(f'{os.path.dirname(__file__)}' f'/resources/test_tap_target_names') assert tap_names == expected_tap_names assert target_names == expected_target_names def test_create_temp_file(self): """Test temp files created at the right location""" # By default temp files should be created in system temp directory temp_file = cli.utils.create_temp_file()[1] assert os.path.isfile(temp_file) os.remove(temp_file) # Providing extra dir argument should create the target directory even if it's not exist temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir')[1] assert os.path.isfile(temp_file) os.remove(temp_file) # Providing dir, suffix and prefix arguments should create the target_directory with custom prefix and suffix temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir', suffix='.json', prefix='pipelinewise_test_temp_file_')[1] assert os.path.isfile(temp_file) os.remove(temp_file) def test_find_errors_in_log_file(self): """Test reading the last n lines of a file""" # Should return an empty list if no error in the file log_file = '{}/resources/sample_log_files/tap-run-no-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == [] # Should return the line with errors log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=EXCEPTION This is an exception\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR This is an error\n', 'pymysql.err.OperationalError: (2013, ' "'Lost connection to MySQL server during query ([Errno 104] Connection reset by peer)')\n", 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR ' 'message=error with status PGRES_COPY_BOTH and no message from the libpq\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL ' 'message=error with status PGRES_COPY_BOTH and no message from the libpq\n', 'snowflake.connector.errors.ProgrammingError: 091003 (22000): ' 'Failure using stage area. 
Cause: [Access Denied (Status Code: 403; Error Code: AccessDenied)]\n'] # Should return the default max number of errors log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 3\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 4\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 5\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 6\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 7\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 8\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 9\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 10\n'] # Should return the custom max number of errors log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file, max_errors=2) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n'] # Should return the custom max number of errors log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file, error_pattern=re.compile('CUSTOM-ERR-PATTERN')) == \ ['CUSTOM-ERR-PATTERN: This is a custom pattern error message\n']
50.08971
119
0.619838
import os import re import pipelinewise.cli as cli import pytest VIRTUALENVS_DIR = './virtualenvs-dummy' class TestUtils: def test_json_detectors(self): assert cli.utils.is_json('{Invalid JSON}') is False assert cli.utils.is_json('[]') is True assert cli.utils.is_json('{}') is True assert cli.utils.is_json('{"prop": 123}') is True assert cli.utils.is_json('{"prop-str":"dummy-string","prop-int":123,"prop-bool":true}') is True assert cli.utils.is_json_file('./dummy-json') is False assert cli.utils.is_json_file('{}/resources/example.json'.format(os.path.dirname(__file__))) is True assert cli.utils.is_json_file('{}/resources/invalid.json'.format(os.path.dirname(__file__))) is False assert cli.utils.is_json_file('{}/resources'.format(os.path.dirname(__file__))) is False def test_json_loader(self): assert cli.utils.load_json('/invalid/location/to/json') is None with pytest.raises(Exception): cli.utils.load_json('{}/resources/invalid.json'.format(os.path.dirname(__file__))) assert \ cli.utils.load_json('{}/resources/example.json'.format(os.path.dirname(__file__))) == \ { 'glossary': { 'title': 'example glossary', 'GlossDiv': { 'title': 'S', 'GlossList': { 'GlossEntry': { 'ID': 'SGML', 'SortAs': 'SGML', 'GlossTerm': 'Standard Generalized Markup Language', 'Acronym': 'SGML', 'Abbrev': 'ISO 8879:1986', 'GlossDef': { 'para': 'A meta-markup language, used to create markup languages such as DocBook.', 'GlossSeeAlso': ['GML', 'XML'] }, 'GlossSee': 'markup' } } } } } def test_json_saver(self): obj = {'foo': 'bar'} with pytest.raises(Exception): cli.utils.save_json(obj, '/invalid/path') cli.utils.save_json(obj, 'test-json.json') assert cli.utils.load_json('test-json.json') == obj os.remove('test-json.json') def test_yaml_detectors(self): assert cli.utils.is_yaml(""" foo: -bar""") is False assert cli.utils.is_yaml('id: 123') is True assert cli.utils.is_yaml(""" id: 123 details: - prop1: 123 - prop2: 456 """) is True assert cli.utils.is_yaml_file('./dummy-yaml') is False assert cli.utils.is_yaml_file('{}/resources/example.yml'.format(os.path.dirname(__file__))) is True assert cli.utils.is_yaml_file('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) is False assert cli.utils.is_yaml_file('{}/resources'.format(os.path.dirname(__file__))) is False def test_yaml_loader(self): # Loading YAML file that not exist should return None assert cli.utils.load_yaml('/invalid/location/to/yaml') is None # Loading YAML file with invalid YAML syntax should raise exception with pytest.raises(Exception): cli.utils.load_yaml('{}/resources/invalid.yml'.format(os.path.dirname(__file__))) # Loading YAML file with valid YAML syntax but invalid vault secret file should raise exception with pytest.raises(Exception): cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__)), 'invalid-secret-file-path') # Loading valid YAML file with no vault encryption assert \ cli.utils.load_yaml('{}/resources/example.yml'.format(os.path.dirname(__file__))) == \ ['Apple', 'Orange', 'Strawberry', 'Mango'] # Loading valid YAML file with vault encrypted properties assert \ cli.utils.load_yaml( '{}/resources/example-with-vault.yml'.format(os.path.dirname(__file__)), '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__))) == \ ['Apple', 'Orange', 'Strawberry', 'Mango', 'Vault Encrypted Secret Fruit'] def test_sample_file_path(self): for sample in cli.utils.get_sample_file_paths(): assert os.path.isfile(sample) is True assert \ re.match('.*config.yml$', sample) or \ re.match('.*(tap|target)_.*.yml.sample$', 
sample) or \ re.match('.*README.md$', sample) def test_extract_log_attributes(self): assert \ cli.utils.extract_log_attributes('snowflake-fx-20190508_000038.singer.log.success') == \ { 'filename': 'snowflake-fx-20190508_000038.singer.log.success', 'target_id': 'snowflake', 'tap_id': 'fx', 'timestamp': '2019-05-08T00:00:38', 'sync_engine': 'singer', 'status': 'success' } assert \ cli.utils.extract_log_attributes('snowflake-fx-20190508_231238.fastsync.log.running') == \ { 'filename': 'snowflake-fx-20190508_231238.fastsync.log.running', 'target_id': 'snowflake', 'tap_id': 'fx', 'timestamp': '2019-05-08T23:12:38', 'sync_engine': 'fastsync', 'status': 'running' } assert \ cli.utils.extract_log_attributes('dummy-log-file.log') == \ { 'filename': 'dummy-log-file.log', 'target_id': 'unknown', 'tap_id': 'unknown', 'timestamp': '1970-01-01T00:00:00', 'sync_engine': 'unknown', 'status': 'unknown' } def test_fastsync_bin(self): # Giving tap and target types should be enough to generate full path to fastsync binaries assert \ cli.utils.get_fastsync_bin(VIRTUALENVS_DIR, 'mysql', 'snowflake') == \ '{}/pipelinewise/bin/mysql-to-snowflake'.format(VIRTUALENVS_DIR) def test_vault(self): # Encrypting with not existing file with secret should exit with pytest.raises(SystemExit) as pytest_wrapped_e: cli.utils.vault_encrypt('plain_test', 'not-existing-secret-file') assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 # Encrypted string should start with $ANSIBLE_VAULT; identifier encrypted_str = str( cli.utils.vault_encrypt('plain_text', '{}/resources/vault-secret.txt'.format(os.path.dirname(__file__)))) assert encrypted_str.startswith("b'$ANSIBLE_VAULT;") is True formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str) assert formatted_encrypted_str.startswith('!vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str # Optional name argument should add the name to the output string as a key formatted_encrypted_str = cli.utils.vault_format_ciphertext_yaml(encrypted_str, name='encrypted_plain_text') assert formatted_encrypted_str.startswith( 'encrypted_plain_text: !vault |') and "b'$ANSIBLE_VAULT;" in formatted_encrypted_str def test_schema_loader(self): with pytest.raises(SystemExit) as pytest_wrapped_e: assert cli.utils.load_schema('/invalid/location/to/schema') is None assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 tap_schema = cli.utils.load_json('{}/../../../pipelinewise/cli/schemas/tap.json'.format( os.path.dirname(__file__))) assert cli.utils.load_schema('tap') == tap_schema def test_json_validate(self): schema = cli.utils.load_schema('tap') valid_tap = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__))) assert cli.utils.validate(valid_tap, schema) is None invalid_tap = cli.utils.load_yaml('{}/resources/tap-invalid.yml'.format(os.path.dirname(__file__))) with pytest.raises(SystemExit) as pytest_wrapped_e: cli.utils.validate(invalid_tap, schema) assert pytest_wrapped_e.type == SystemExit assert pytest_wrapped_e.value.code == 1 def test_delete_keys(self): assert cli.utils.delete_empty_keys({'foo': 'bar', 'foo2': None}) == {'foo': 'bar'} assert cli.utils.delete_empty_keys({ 'foo': 'bar', 'foo2': None, 'foo3': None, 'foo4': 'bar4' }) == { 'foo': 'bar', 'foo4': 'bar4' } assert cli.utils.delete_keys_from_dict({'foo': 'bar', 'foo2': 'bar2'}, ['foo2']) == {'foo': 'bar'} assert cli.utils.delete_keys_from_dict({ 'foo': 'bar', 'foo2': 'bar2', 'foo3': None, 'foo4': 'bar4' }, ['foo2', 
'foo4']) == { 'foo': 'bar', 'foo3': None } assert cli.utils.delete_keys_from_dict( [{'foo': 'bar', 'foo2': 'bar2'}, {'foo3': {'nested_foo': 'nested_bar', 'nested_foo2': 'nested_bar2'}}], ['foo2', 'nested_foo']) == \ [{'foo': 'bar'}, {'foo3': {'nested_foo2': 'nested_bar2'}}] def test_silentremove(self): assert cli.utils.silentremove('this-file-not-exists.json') is None def test_tap_properties(self): tap_mysql = cli.utils.load_yaml('{}/resources/tap-valid-mysql.yml'.format(os.path.dirname(__file__))) tap_catalog_argument = cli.utils.get_tap_property(tap_mysql, 'tap_catalog_argument') assert tap_catalog_argument in ['--catalog', '--properties'] assert isinstance(cli.utils.get_tap_extra_config_keys(tap_mysql), dict) is True assert cli.utils.get_tap_stream_id(tap_mysql, 'dummy_db', 'dummy_schema', 'dummy_table') == \ 'dummy_schema-dummy_table' assert cli.utils.get_tap_stream_name(tap_mysql, 'dummy_db', 'dummy_schema', 'dummy_table') == 'dummy_schema-dummy_table' assert cli.utils.get_tap_default_replication_method(tap_mysql) == 'LOG_BASED' assert cli.utils.get_tap_property_by_tap_type('tap-mysql', 'default_replication_method') == 'LOG_BASED' tap_kafka = cli.utils.load_yaml('{}/resources/tap-valid-kafka.yml'.format(os.path.dirname(__file__))) assert cli.utils.get_tap_extra_config_keys(tap_kafka, temp_dir='/my/temp/dir') == { 'local_store_dir': '/my/temp/dir', 'encoding': 'utf-8' } tap_snowflake = cli.utils.load_yaml('{}/resources/tap-valid-snowflake.yml'.format(os.path.dirname(__file__))) assert cli.utils.get_tap_extra_config_keys(tap_snowflake) == { 'tables': 'SCHEMA_1.TABLE_ONE,SCHEMA_1.TABLE_TWO' } def test_get_tap_target_names(self): expected_tap_names = {'tap_test.yml', 'tap_2test.yml', 'tap_valid.yaml'} expected_target_names = {'target_test.yml'} tap_names, target_names = cli.utils.get_tap_target_names(f'{os.path.dirname(__file__)}' f'/resources/test_tap_target_names') assert tap_names == expected_tap_names assert target_names == expected_target_names def test_create_temp_file(self): temp_file = cli.utils.create_temp_file()[1] assert os.path.isfile(temp_file) os.remove(temp_file) temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir')[1] assert os.path.isfile(temp_file) os.remove(temp_file) # Providing dir, suffix and prefix arguments should create the target_directory with custom prefix and suffix temp_file = cli.utils.create_temp_file(dir='./temp_dir_to_create_automatically/deep_temp_dir', suffix='.json', prefix='pipelinewise_test_temp_file_')[1] assert os.path.isfile(temp_file) os.remove(temp_file) def test_find_errors_in_log_file(self): # Should return an empty list if no error in the file log_file = '{}/resources/sample_log_files/tap-run-no-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == [] # Should return the line with errors log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=EXCEPTION This is an exception\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR This is an error\n', 'pymysql.err.OperationalError: (2013, ' "'Lost connection to MySQL server during query ([Errno 104] Connection reset by peer)')\n", 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=ERROR ' 'message=error with status PGRES_COPY_BOTH 
and no message from the libpq\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL ' 'message=error with status PGRES_COPY_BOTH and no message from the libpq\n', 'snowflake.connector.errors.ProgrammingError: 091003 (22000): ' 'Failure using stage area. Cause: [Access Denied (Status Code: 403; Error Code: AccessDenied)]\n'] # Should return the default max number of errors log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 3\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 4\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 5\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 6\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 7\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 8\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 9\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 10\n'] # Should return the custom max number of errors log_file = '{}/resources/sample_log_files/tap-run-lot-of-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file, max_errors=2) == \ ['time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 1\n', 'time=2020-07-15 11:24:43 logger_name=tap_postgres log_level=CRITICAL This is a critical error 2\n'] # Should return the custom max number of errors log_file = '{}/resources/sample_log_files/tap-run-errors.log'.format(os.path.dirname(__file__)) assert cli.utils.find_errors_in_log_file(log_file, error_pattern=re.compile('CUSTOM-ERR-PATTERN')) == \ ['CUSTOM-ERR-PATTERN: This is a custom pattern error message\n']
true
true
f701215b2d9a89de6d65a883700283d7fc6edbc1
16,911
py
Python
salt/states/boto_kinesis.py
l2ol33rt/salt
ff68bbd9f4bda992a3e039822fb32f141e94347c
[ "Apache-2.0" ]
1
2021-04-05T19:46:35.000Z
2021-04-05T19:46:35.000Z
salt/states/boto_kinesis.py
dv-trading/salt
f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e
[ "Apache-2.0" ]
null
null
null
salt/states/boto_kinesis.py
dv-trading/salt
f5d4334178c50d0dfcd205d5a7fb9cfb27fd369e
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- ''' Manage Kinesis Streams ====================== .. versionadded:: Nitrogen Create and destroy Kinesis streams. Be aware that this interacts with Amazon's services, and so may incur charges. This module uses ``boto3``, which can be installed via package, or pip. This module accepts explicit Kinesis credentials but can also utilize IAM roles assigned to the instance through Instance Profiles. Dynamic credentials are then automatically obtained from AWS API and no further configuration is necessary. More information available `here <http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html>`_. If IAM roles are not used you need to specify them either in a pillar file or in the minion's config file: .. code-block:: yaml keyid: GKTADJGHEIQSXMKKRBJ08H key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs region: us-east-1 It's also possible to specify ``key``, ``keyid`` and ``region`` via a profile, either passed in as a dict, or as a string to pull from pillars or minion config: .. code-block:: yaml myprofile: keyid: GKTADJGHEIQSXMKKRBJ08H key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs region: us-east-1 .. code-block:: yaml Ensure Kinesis stream does not exist: boto_kinesis.absent: - name: new_stream - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs - region: us-east-1 Ensure Kinesis stream exists: boto_kinesis.present: - name: new_stream - retention_hours: 168 - enhanced_monitoring: ['ALL'] - num_shards: 2 - keyid: GKTADJGHEIQSXMKKRBJ08H - key: askdjghsdfjkghWupUjasdflkdfklgjsdfjajkghs - region: us-east-1 ''' # Keep pylint from chocking on ret # pylint: disable=undefined-variable # Import Python libs from __future__ import absolute_import import logging log = logging.getLogger(__name__) def __virtual__(): ''' Only load if boto_kinesis is available. ''' ret = 'boto_kinesis' if 'boto_kinesis.exists' in __salt__ else False return ret def present(name, retention_hours=None, enhanced_monitoring=None, num_shards=None, do_reshard=True, region=None, key=None, keyid=None, profile=None): ''' Ensure the kinesis stream is properly configured and scaled. name (string) Stream name retention_hours (int) Retain data for this many hours. AWS allows minimum 24 hours, maximum 168 hours. enhanced_monitoring (list of string) Turn on enhanced monitoring for the specified shard-level metrics. Pass in ['ALL'] or True for all metrics, [] or False for no metrics. Turn on individual metrics by passing in a list: ['IncomingBytes', 'OutgoingBytes'] Note that if only some metrics are supplied, the remaining metrics will be turned off. num_shards (int) Reshard stream (if necessary) to this number of shards !!!!! Resharding is expensive! Each split or merge can take up to 30 seconds, and the reshard method balances the partition space evenly. Resharding from N to N+1 can require 2N operations. Resharding is much faster with powers of 2 (e.g. 2^N to 2^N+1) !!!!! do_reshard (boolean) If set to False, this script will NEVER reshard the stream, regardless of other input. Useful for testing. region (string) Region to connect to. key (string) Secret key to be used. keyid (string) Access key to be used. profile (dict) A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. 
''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} comments = [] changes_old = {} changes_new = {} # Ensure stream exists exists = __salt__['boto_kinesis.exists']( name, region, key, keyid, profile ) if exists['result'] is False: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0} would be created'.format(name)) _add_changes(ret, changes_old, changes_new, comments) return ret else: is_created = __salt__['boto_kinesis.create_stream']( name, num_shards, region, key, keyid, profile ) if 'error' in is_created: ret['result'] = False comments.append('Failed to create stream {0}: {1}'.format(name, is_created['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0} successfully created'.format(name)) changes_new['name'] = name changes_new['num_shards'] = num_shards else: comments.append('Kinesis stream {0} already exists'.format(name)) stream_response = __salt__['boto_kinesis.get_stream_when_active']( name, region, key, keyid, profile ) if 'error' in stream_response: ret['result'] = False comments.append('Kinesis stream {0}: error getting description: {1}' .format(name, stream_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret stream_details = stream_response['result']["StreamDescription"] # Configure retention hours if retention_hours is not None: old_retention_hours = stream_details["RetentionPeriodHours"] retention_matches = (old_retention_hours == retention_hours) if not retention_matches: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: retention hours would be updated to {1}' .format(name, retention_hours)) else: if old_retention_hours > retention_hours: retention_updated = __salt__['boto_kinesis.decrease_stream_retention_period']( name, retention_hours, region, key, keyid, profile ) else: retention_updated = __salt__['boto_kinesis.increase_stream_retention_period']( name, retention_hours, region, key, keyid, profile ) if 'error' in retention_updated: ret['result'] = False comments.append('Kinesis stream {0}: failed to update retention hours: {1}' .format(name, retention_updated['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: retention hours was successfully updated'.format(name)) changes_old['retention_hours'] = old_retention_hours changes_new['retention_hours'] = retention_hours # wait until active again, otherwise it will log a lot of ResourceInUseExceptions # note that this isn't required below; reshard() will itself handle waiting stream_response = __salt__['boto_kinesis.get_stream_when_active']( name, region, key, keyid, profile ) if 'error' in stream_response: ret['result'] = False comments.append('Kinesis stream {0}: error getting description: {1}' .format(name, stream_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret stream_details = stream_response['result']["StreamDescription"] else: comments.append('Kinesis stream {0}: retention hours did not require change, already set at {1}' .format(name, old_retention_hours)) else: comments.append('Kinesis stream {0}: did not configure retention hours'.format(name)) # Configure enhanced monitoring if enhanced_monitoring is not None: if enhanced_monitoring is True or enhanced_monitoring == ['ALL']: # for ease of comparison; describe_stream will always return the full list of metrics, never 'ALL' enhanced_monitoring = [ "IncomingBytes", "OutgoingRecords", "IteratorAgeMilliseconds", 
"IncomingRecords", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "OutgoingBytes" ] elif enhanced_monitoring is False or enhanced_monitoring == "None": enhanced_monitoring = [] old_enhanced_monitoring = stream_details.get("EnhancedMonitoring")[0]["ShardLevelMetrics"] new_monitoring_set = set(enhanced_monitoring) old_monitoring_set = set(old_enhanced_monitoring) matching_metrics = new_monitoring_set.intersection(old_monitoring_set) enable_metrics = list(new_monitoring_set.difference(matching_metrics)) disable_metrics = list(old_monitoring_set.difference(matching_metrics)) if len(enable_metrics) != 0: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would enable enhanced monitoring for {1}' .format(name, enable_metrics)) else: metrics_enabled = __salt__['boto_kinesis.enable_enhanced_monitoring']( name, enable_metrics, region, key, keyid, profile ) if 'error' in metrics_enabled: ret['result'] = False comments.append('Kinesis stream {0}: failed to enable enhanced monitoring: {1}' .format(name, metrics_enabled['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: enhanced monitoring was enabled for shard-level metrics {1}' .format(name, enable_metrics)) if len(disable_metrics) != 0: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would disable enhanced monitoring for {1}' .format(name, disable_metrics)) else: metrics_disabled = __salt__['boto_kinesis.disable_enhanced_monitoring']( name, disable_metrics, region, key, keyid, profile ) if 'error' in metrics_disabled: ret['result'] = False comments.append('Kinesis stream {0}: failed to disable enhanced monitoring: {1}' .format(name, metrics_disabled['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: enhanced monitoring was disabled for shard-level metrics {1}' .format(name, disable_metrics)) if len(disable_metrics) == 0 and len(enable_metrics) == 0: comments.append('Kinesis stream {0}: enhanced monitoring did not require change, already set at {1}' .format(name, (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0 else "None"))) elif not __opts__['test']: changes_old['enhanced_monitoring'] = (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0 else "None") changes_new['enhanced_monitoring'] = (enhanced_monitoring if len(enhanced_monitoring) > 0 else "None") else: comments.append('Kinesis stream {0}: did not configure enhanced monitoring'.format(name)) # Reshard stream if necessary min_hash_key, max_hash_key, full_stream_details = __salt__['boto_kinesis.get_info_for_reshard']( stream_details ) old_num_shards = len(full_stream_details["OpenShards"]) if num_shards is not None and do_reshard: num_shards_matches = (old_num_shards == num_shards) if not num_shards_matches: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would be resharded from {1} to {2} shards' .format(name, old_num_shards, num_shards)) else: log.info("Resharding stream from {0} to {1} shards, this could take a while" .format(old_num_shards, num_shards)) # reshard returns True when a split/merge action is taken, # or False when no more actions are required continue_reshard = True while continue_reshard: reshard_response = __salt__['boto_kinesis.reshard']( name, num_shards, do_reshard, region, key, keyid, profile) if 'error' in reshard_response: ret['result'] = False comments.append('Encountered error while resharding {0}: {1}' 
.format(name, reshard_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret continue_reshard = reshard_response['result'] comments.append('Kinesis stream {0}: successfully resharded to {1} shards'.format(name, num_shards)) changes_old['num_shards'] = old_num_shards changes_new['num_shards'] = num_shards else: comments.append('Kinesis stream {0}: did not require resharding, remains at {1} shards' .format(name, old_num_shards)) else: comments.append('Kinesis stream {0}: did not reshard, remains at {1} shards'.format(name, old_num_shards)) _add_changes(ret, changes_old, changes_new, comments) return ret def absent(name, region=None, key=None, keyid=None, profile=None): ''' Delete the kinesis stream, if it exists. name (string) Stream name region (string) Region to connect to. key (string) Secret key to be used. keyid (string) Access key to be used. profile (dict) A dict with region, key and keyid, or a pillar key (string) that contains a dict with region, key and keyid. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} exists = __salt__['boto_kinesis.exists']( name, region, key, keyid, profile ) if exists['result'] is False: ret['comment'] = 'Kinesis stream {0} does not exist'.format(name) return ret if __opts__['test']: ret['comment'] = 'Kinesis stream {0} would be deleted'.format(name) ret['result'] = None return ret is_deleted = __salt__['boto_kinesis.delete_stream']( name, region, key, keyid, profile ) if 'error' in is_deleted: ret['comment'] = 'Failed to delete stream {0}: {1}'.format(name, is_deleted['error']) ret['result'] = False else: ret['comment'] = 'Deleted stream {0}'.format(name) ret['changes'].setdefault('old', 'Stream {0} exists'.format(name)) ret['changes'].setdefault('new', 'Stream {0} deleted'.format(name)) return ret def _add_changes(ret, changes_old, changes_new, comments): ret['comment'] = ',\n'.join(comments) if changes_old: ret['changes']['old'] = changes_old if changes_new: ret['changes']['new'] = changes_new
37.747768
117
0.567619
from __future__ import absolute_import import logging log = logging.getLogger(__name__) def __virtual__(): ret = 'boto_kinesis' if 'boto_kinesis.exists' in __salt__ else False return ret def present(name, retention_hours=None, enhanced_monitoring=None, num_shards=None, do_reshard=True, region=None, key=None, keyid=None, profile=None): ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} comments = [] changes_old = {} changes_new = {} exists = __salt__['boto_kinesis.exists']( name, region, key, keyid, profile ) if exists['result'] is False: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0} would be created'.format(name)) _add_changes(ret, changes_old, changes_new, comments) return ret else: is_created = __salt__['boto_kinesis.create_stream']( name, num_shards, region, key, keyid, profile ) if 'error' in is_created: ret['result'] = False comments.append('Failed to create stream {0}: {1}'.format(name, is_created['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0} successfully created'.format(name)) changes_new['name'] = name changes_new['num_shards'] = num_shards else: comments.append('Kinesis stream {0} already exists'.format(name)) stream_response = __salt__['boto_kinesis.get_stream_when_active']( name, region, key, keyid, profile ) if 'error' in stream_response: ret['result'] = False comments.append('Kinesis stream {0}: error getting description: {1}' .format(name, stream_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret stream_details = stream_response['result']["StreamDescription"] if retention_hours is not None: old_retention_hours = stream_details["RetentionPeriodHours"] retention_matches = (old_retention_hours == retention_hours) if not retention_matches: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: retention hours would be updated to {1}' .format(name, retention_hours)) else: if old_retention_hours > retention_hours: retention_updated = __salt__['boto_kinesis.decrease_stream_retention_period']( name, retention_hours, region, key, keyid, profile ) else: retention_updated = __salt__['boto_kinesis.increase_stream_retention_period']( name, retention_hours, region, key, keyid, profile ) if 'error' in retention_updated: ret['result'] = False comments.append('Kinesis stream {0}: failed to update retention hours: {1}' .format(name, retention_updated['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: retention hours was successfully updated'.format(name)) changes_old['retention_hours'] = old_retention_hours changes_new['retention_hours'] = retention_hours stream_response = __salt__['boto_kinesis.get_stream_when_active']( name, region, key, keyid, profile ) if 'error' in stream_response: ret['result'] = False comments.append('Kinesis stream {0}: error getting description: {1}' .format(name, stream_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret stream_details = stream_response['result']["StreamDescription"] else: comments.append('Kinesis stream {0}: retention hours did not require change, already set at {1}' .format(name, old_retention_hours)) else: comments.append('Kinesis stream {0}: did not configure retention hours'.format(name)) # Configure enhanced monitoring if enhanced_monitoring is not None: if enhanced_monitoring is True or enhanced_monitoring == ['ALL']: # for ease of comparison; describe_stream will always return the 
full list of metrics, never 'ALL' enhanced_monitoring = [ "IncomingBytes", "OutgoingRecords", "IteratorAgeMilliseconds", "IncomingRecords", "ReadProvisionedThroughputExceeded", "WriteProvisionedThroughputExceeded", "OutgoingBytes" ] elif enhanced_monitoring is False or enhanced_monitoring == "None": enhanced_monitoring = [] old_enhanced_monitoring = stream_details.get("EnhancedMonitoring")[0]["ShardLevelMetrics"] new_monitoring_set = set(enhanced_monitoring) old_monitoring_set = set(old_enhanced_monitoring) matching_metrics = new_monitoring_set.intersection(old_monitoring_set) enable_metrics = list(new_monitoring_set.difference(matching_metrics)) disable_metrics = list(old_monitoring_set.difference(matching_metrics)) if len(enable_metrics) != 0: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would enable enhanced monitoring for {1}' .format(name, enable_metrics)) else: metrics_enabled = __salt__['boto_kinesis.enable_enhanced_monitoring']( name, enable_metrics, region, key, keyid, profile ) if 'error' in metrics_enabled: ret['result'] = False comments.append('Kinesis stream {0}: failed to enable enhanced monitoring: {1}' .format(name, metrics_enabled['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: enhanced monitoring was enabled for shard-level metrics {1}' .format(name, enable_metrics)) if len(disable_metrics) != 0: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would disable enhanced monitoring for {1}' .format(name, disable_metrics)) else: metrics_disabled = __salt__['boto_kinesis.disable_enhanced_monitoring']( name, disable_metrics, region, key, keyid, profile ) if 'error' in metrics_disabled: ret['result'] = False comments.append('Kinesis stream {0}: failed to disable enhanced monitoring: {1}' .format(name, metrics_disabled['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret comments.append('Kinesis stream {0}: enhanced monitoring was disabled for shard-level metrics {1}' .format(name, disable_metrics)) if len(disable_metrics) == 0 and len(enable_metrics) == 0: comments.append('Kinesis stream {0}: enhanced monitoring did not require change, already set at {1}' .format(name, (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0 else "None"))) elif not __opts__['test']: changes_old['enhanced_monitoring'] = (old_enhanced_monitoring if len(old_enhanced_monitoring) > 0 else "None") changes_new['enhanced_monitoring'] = (enhanced_monitoring if len(enhanced_monitoring) > 0 else "None") else: comments.append('Kinesis stream {0}: did not configure enhanced monitoring'.format(name)) # Reshard stream if necessary min_hash_key, max_hash_key, full_stream_details = __salt__['boto_kinesis.get_info_for_reshard']( stream_details ) old_num_shards = len(full_stream_details["OpenShards"]) if num_shards is not None and do_reshard: num_shards_matches = (old_num_shards == num_shards) if not num_shards_matches: if __opts__['test']: ret['result'] = None comments.append('Kinesis stream {0}: would be resharded from {1} to {2} shards' .format(name, old_num_shards, num_shards)) else: log.info("Resharding stream from {0} to {1} shards, this could take a while" .format(old_num_shards, num_shards)) # reshard returns True when a split/merge action is taken, # or False when no more actions are required continue_reshard = True while continue_reshard: reshard_response = __salt__['boto_kinesis.reshard']( name, num_shards, do_reshard, region, key, keyid, profile) if 
'error' in reshard_response: ret['result'] = False comments.append('Encountered error while resharding {0}: {1}' .format(name, reshard_response['error'])) _add_changes(ret, changes_old, changes_new, comments) return ret continue_reshard = reshard_response['result'] comments.append('Kinesis stream {0}: successfully resharded to {1} shards'.format(name, num_shards)) changes_old['num_shards'] = old_num_shards changes_new['num_shards'] = num_shards else: comments.append('Kinesis stream {0}: did not require resharding, remains at {1} shards' .format(name, old_num_shards)) else: comments.append('Kinesis stream {0}: did not reshard, remains at {1} shards'.format(name, old_num_shards)) _add_changes(ret, changes_old, changes_new, comments) return ret def absent(name, region=None, key=None, keyid=None, profile=None): ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} exists = __salt__['boto_kinesis.exists']( name, region, key, keyid, profile ) if exists['result'] is False: ret['comment'] = 'Kinesis stream {0} does not exist'.format(name) return ret if __opts__['test']: ret['comment'] = 'Kinesis stream {0} would be deleted'.format(name) ret['result'] = None return ret is_deleted = __salt__['boto_kinesis.delete_stream']( name, region, key, keyid, profile ) if 'error' in is_deleted: ret['comment'] = 'Failed to delete stream {0}: {1}'.format(name, is_deleted['error']) ret['result'] = False else: ret['comment'] = 'Deleted stream {0}'.format(name) ret['changes'].setdefault('old', 'Stream {0} exists'.format(name)) ret['changes'].setdefault('new', 'Stream {0} deleted'.format(name)) return ret def _add_changes(ret, changes_old, changes_new, comments): ret['comment'] = ',\n'.join(comments) if changes_old: ret['changes']['old'] = changes_old if changes_new: ret['changes']['new'] = changes_new
true
true
f701218da6eab26ef7dfeb0b8ee6c6c220ede8c3
1,890
py
Python
tensorflow_model_optimization/python/core/internal/tensor_encoding/utils/__init__.py
akarmi/model-optimization
2d3faaa361ecb3639f4a29da56e0e6ed52336318
[ "Apache-2.0" ]
1
2019-10-10T06:14:45.000Z
2019-10-10T06:14:45.000Z
tensorflow_model_optimization/python/core/internal/tensor_encoding/utils/__init__.py
akarmi/model-optimization
2d3faaa361ecb3639f4a29da56e0e6ed52336318
[ "Apache-2.0" ]
null
null
null
tensorflow_model_optimization/python/core/internal/tensor_encoding/utils/__init__.py
akarmi/model-optimization
2d3faaa361ecb3639f4a29da56e0e6ed52336318
[ "Apache-2.0" ]
1
2020-01-01T04:38:34.000Z
2020-01-01T04:38:34.000Z
# Copyright 2019, The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the `tensor_encoding` package."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import assert_compatible
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import merge_dicts
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import OrderedEnum
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import split_dict_py_tf
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import static_or_dynamic_shape
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import fast_walsh_hadamard_transform
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats_cmwc
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs_cmwc
60.967742
123
0.855556
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import assert_compatible
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import merge_dicts
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import OrderedEnum
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import split_dict_py_tf
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.py_utils import static_or_dynamic_shape
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import fast_walsh_hadamard_transform
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_floats_cmwc
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs
from tensorflow_model_optimization.python.core.internal.tensor_encoding.utils.tf_utils import random_signs_cmwc
true
true
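Among the names re-exported by this `__init__.py` is `fast_walsh_hadamard_transform`. For orientation only, here is a generic NumPy sketch of the textbook Walsh-Hadamard butterfly the function is named after; this is not the package's TensorFlow implementation, just a minimal reference version under that assumption.

import numpy as np

def fwht(x):
    """Unnormalized fast Walsh-Hadamard transform of a length-2**k vector, O(n log n)."""
    x = np.array(x, dtype=np.float64)          # work on a copy
    n = x.shape[0]
    assert n > 0 and n & (n - 1) == 0, 'length must be a power of two'
    h = 1
    while h < n:
        for i in range(0, n, h * 2):
            for j in range(i, i + h):
                a, b = x[j], x[j + h]
                x[j], x[j + h] = a + b, a - b  # butterfly: sum and difference
        h *= 2
    return x

v = np.array([1.0, 0.0, 1.0, 0.0])
print(fwht(v))            # [2. 2. 0. 0.]
print(fwht(fwht(v)) / 4)  # applying it twice and dividing by n recovers v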
f70122a218251691a381f37023a3eb25286312ae
4,584
py
Python
experiments/do_data_generation.py
grisoniFr/virtual_libraries
0aac0ce249f6f3bc529abb3cbdf2d3f49be84388
[ "MIT" ]
null
null
null
experiments/do_data_generation.py
grisoniFr/virtual_libraries
0aac0ce249f6f3bc529abb3cbdf2d3f49be84388
[ "MIT" ]
null
null
null
experiments/do_data_generation.py
grisoniFr/virtual_libraries
0aac0ce249f6f3bc529abb3cbdf2d3f49be84388
[ "MIT" ]
null
null
null
import os, sys import time import warnings import argparse import configparser import ast import numpy as np from math import log from rdkit import Chem from rdkit import rdBase rdBase.DisableLog('rdApp.*') from rdkit.Chem import Draw from keras.models import load_model sys.path.append('../src/') from python import helper as hp from python import fixed_parameters as FP parser = argparse.ArgumentParser(description='SMILES generation') parser.add_argument('-fn','--filename', type=str, help='Path to the fine-tuning txt file', required=True) parser.add_argument('-m','--model_path', type=str, help='Path to a pretrained model', required=True) parser.add_argument('-v','--verbose', type=bool, help='Verbose', required=True) def int_to_smile(array, indices_token, pad_char): """ From an array of int, return a list of molecules in string smile format Note: remove the padding char """ all_mols = [] for seq in array: new_mol = [indices_token[str(int(x))] for x in seq] all_mols.append(''.join(new_mol).replace(pad_char, '')) return all_mols def one_hot_encode(token_lists, n_chars): output = np.zeros((len(token_lists), len(token_lists[0]), n_chars)) for i, token_list in enumerate(token_lists): for j, token in enumerate(token_list): output[i, j, int(token)] = 1 return output def sample(model, temp, start_char, end_char, max_len, indices_token, token_indices): n_chars = len(indices_token) seed_token = [token_indices[start_char]] generated = indices_token[str(seed_token[0])] while generated[-1] != end_char and len(generated) < max_len: x_seed = one_hot_encode([seed_token], n_chars) full_preds = model.predict(x_seed, verbose=0)[0] logits = full_preds[-1] probas, next_char_ind = get_token_proba(logits, temp) next_char = indices_token[str(next_char_ind)] generated += next_char seed_token += [next_char_ind] return generated def get_token_proba(preds, temp): preds = np.asarray(preds).astype('float64') preds = np.log(preds) / temp exp_preds = np.exp(preds) probas = exp_preds / np.sum(exp_preds) char_ind = np.argmax(np.random.multinomial(1, probas, 1)) return probas, char_ind def softmax(preds): return np.exp(preds)/np.sum(np.exp(preds)) if __name__ == '__main__': start = time.time() #################################### # get back parameters args = vars(parser.parse_args()) verbose = args['verbose'] filename = args['filename'] model_path = args['model_path'] name_data = filename.split('/')[-1].replace('.txt','') config = configparser.ConfigParser() config.read('parameters.ini') if verbose: print('\nSTART SAMPLING') #################################### #################################### # path to save data save_path = f'results/{name_data}/generated_data/' os.makedirs(save_path, exist_ok=True) # path to checkpoints dir_ckpts = f'results/{name_data}/models/' #################################### #################################### # Parameters to sample novo smiles temp = float(config['EXPERIMENTS']['temp']) n_sample = int(config['EXPERIMENTS']['n_sample']) if n_sample>5000: warnings.warn('You will sample more than 5000 SMILES; this will take a while') max_len = int(config['PROCESSING']['max_len']) pad_char = FP.PROCESSING_FIXED['pad_char'] start_char = FP.PROCESSING_FIXED['start_char'] end_char = FP.PROCESSING_FIXED['end_char'] indices_token = FP.INDICES_TOKEN token_indices = FP.TOKEN_INDICES #################################### #################################### # start the sampling of new SMILES epoch = model_path.split('/')[-1].replace('.h5', '') if verbose: print(f'Sampling from model saved at epoch {epoch}') 
model = load_model(model_path) generated_smi = [] for n in range(n_sample): generated_smi.append(sample(model, temp, start_char, end_char, max_len+1, indices_token, token_indices)) hp.save_obj(generated_smi, f'{save_path}{epoch}_{temp}') end = time.time() if verbose: print(f'SAMPLING DONE for model from epoch {epoch} in {end-start:.2f} seconds') ####################################
30.972973
105
0.602749
import os, sys import time import warnings import argparse import configparser import ast import numpy as np from math import log from rdkit import Chem from rdkit import rdBase rdBase.DisableLog('rdApp.*') from rdkit.Chem import Draw from keras.models import load_model sys.path.append('../src/') from python import helper as hp from python import fixed_parameters as FP parser = argparse.ArgumentParser(description='SMILES generation') parser.add_argument('-fn','--filename', type=str, help='Path to the fine-tuning txt file', required=True) parser.add_argument('-m','--model_path', type=str, help='Path to a pretrained model', required=True) parser.add_argument('-v','--verbose', type=bool, help='Verbose', required=True) def int_to_smile(array, indices_token, pad_char): all_mols = [] for seq in array: new_mol = [indices_token[str(int(x))] for x in seq] all_mols.append(''.join(new_mol).replace(pad_char, '')) return all_mols def one_hot_encode(token_lists, n_chars): output = np.zeros((len(token_lists), len(token_lists[0]), n_chars)) for i, token_list in enumerate(token_lists): for j, token in enumerate(token_list): output[i, j, int(token)] = 1 return output def sample(model, temp, start_char, end_char, max_len, indices_token, token_indices): n_chars = len(indices_token) seed_token = [token_indices[start_char]] generated = indices_token[str(seed_token[0])] while generated[-1] != end_char and len(generated) < max_len: x_seed = one_hot_encode([seed_token], n_chars) full_preds = model.predict(x_seed, verbose=0)[0] logits = full_preds[-1] probas, next_char_ind = get_token_proba(logits, temp) next_char = indices_token[str(next_char_ind)] generated += next_char seed_token += [next_char_ind] return generated def get_token_proba(preds, temp): preds = np.asarray(preds).astype('float64') preds = np.log(preds) / temp exp_preds = np.exp(preds) probas = exp_preds / np.sum(exp_preds) char_ind = np.argmax(np.random.multinomial(1, probas, 1)) return probas, char_ind def softmax(preds): return np.exp(preds)/np.sum(np.exp(preds)) if __name__ == '__main__': start = time.time() args = vars(parser.parse_args()) verbose = args['verbose'] filename = args['filename'] model_path = args['model_path'] name_data = filename.split('/')[-1].replace('.txt','') config = configparser.ConfigParser() config.read('parameters.ini') if verbose: print('\nSTART SAMPLING') save_path = f'results/{name_data}/generated_data/' os.makedirs(save_path, exist_ok=True) dir_ckpts = f'results/{name_data}/models/' temp = float(config['EXPERIMENTS']['temp']) n_sample = int(config['EXPERIMENTS']['n_sample']) if n_sample>5000: warnings.warn('You will sample more than 5000 SMILES; this will take a while') max_len = int(config['PROCESSING']['max_len']) pad_char = FP.PROCESSING_FIXED['pad_char'] start_char = FP.PROCESSING_FIXED['start_char'] end_char = FP.PROCESSING_FIXED['end_char'] indices_token = FP.INDICES_TOKEN token_indices = FP.TOKEN_INDICES epoch = model_path.split('/')[-1].replace('.h5', '') if verbose: print(f'Sampling from model saved at epoch {epoch}') model = load_model(model_path) generated_smi = [] for n in range(n_sample): generated_smi.append(sample(model, temp, start_char, end_char, max_len+1, indices_token, token_indices)) hp.save_obj(generated_smi, f'{save_path}{epoch}_{temp}') end = time.time() if verbose: print(f'SAMPLING DONE for model from epoch {epoch} in {end-start:.2f} seconds')
true
true
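The `do_data_generation.py` record above samples SMILES one token at a time, re-scaling the model's output distribution by a temperature before drawing the next character (`get_token_proba`). A minimal standalone sketch of that sampling step is shown below; it mirrors the script's logic but is not the repository's code, and the epsilon plus the max-subtraction are stability additions of my own.

import numpy as np

def sample_with_temperature(probs, temp, rng=None):
    """Draw one token index from `probs` after temperature re-scaling.

    Divide the log-probabilities by `temp`, re-normalize with a softmax, and
    sample once from the resulting multinomial, as in `get_token_proba` above.
    """
    probs = np.asarray(probs, dtype=np.float64)
    logits = np.log(probs + 1e-12) / temp      # epsilon avoids log(0); not in the original
    logits -= logits.max()                     # shift for numerical stability
    p = np.exp(logits) / np.exp(logits).sum()
    rng = rng if rng is not None else np.random.default_rng()
    return int(rng.choice(len(p), p=p))

# temp < 1 sharpens the distribution (more greedy), temp > 1 flattens it (more diverse SMILES)
print(sample_with_temperature([0.7, 0.2, 0.1], temp=0.5))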
f70122bf0cb6430caec966b97431b6569aafddfc
831
py
Python
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/jdfusion/models/VpcSecurityGroupCreateTask.py
Ureimu/weather-robot
7634195af388538a566ccea9f8a8534c5fb0f4b6
[ "MIT" ]
14
2018-04-19T09:53:56.000Z
2022-01-27T06:05:48.000Z
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/jdfusion/models/VpcSecurityGroupCreateTask.py
Ureimu/weather-robot
7634195af388538a566ccea9f8a8534c5fb0f4b6
[ "MIT" ]
15
2018-09-11T05:39:54.000Z
2021-07-02T12:38:02.000Z
python_code/vnev/Lib/site-packages/jdcloud_sdk/services/jdfusion/models/VpcSecurityGroupCreateTask.py
Ureimu/weather-robot
7634195af388538a566ccea9f8a8534c5fb0f4b6
[ "MIT" ]
33
2018-04-20T05:29:16.000Z
2022-02-17T09:10:05.000Z
# coding=utf8

# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.


class VpcSecurityGroupCreateTask(object):

    def __init__(self, task=None):
        """
        :param task: (Optional)
        """
        self.task = task
29.678571
75
0.719615
class VpcSecurityGroupCreateTask(object):

    def __init__(self, task=None):
        self.task = task
true
true
f701237fe9e15e39fca212a577bb10928413d4e7
31,565
py
Python
yolact.py
JosmarSuarez/yolact
43b694603638562ffcdc81df7b04783c9990291c
[ "MIT" ]
3
2020-04-30T06:52:52.000Z
2021-04-29T11:07:26.000Z
yolact.py
JosmarSuarez/yolact
43b694603638562ffcdc81df7b04783c9990291c
[ "MIT" ]
null
null
null
yolact.py
JosmarSuarez/yolact
43b694603638562ffcdc81df7b04783c9990291c
[ "MIT" ]
1
2021-06-18T14:00:21.000Z
2021-06-18T14:00:21.000Z
import torch, torchvision import torch.nn as nn import torch.nn.functional as F from torchvision.models.resnet import Bottleneck import numpy as np from itertools import product from math import sqrt from typing import List from collections import defaultdict from data.config import cfg, mask_type from layers import Detect from layers.interpolate import InterpolateModule from backbone import construct_backbone import torch.backends.cudnn as cudnn from utils import timer from utils.functions import MovingAverage, make_net # This is required for Pytorch 1.0.1 on Windows to initialize Cuda on some driver versions. # See the bug report here: https://github.com/pytorch/pytorch/issues/17108 torch.cuda.current_device() # As of March 10, 2019, Pytorch DataParallel still doesn't support JIT Script Modules use_jit = torch.cuda.device_count() <= 1 if not use_jit: print('Multiple GPUs detected! Turning off JIT.') ScriptModuleWrapper = torch.jit.ScriptModule if use_jit else nn.Module script_method_wrapper = torch.jit.script_method if use_jit else lambda fn, _rcn=None: fn class Concat(nn.Module): def __init__(self, nets, extra_params): super().__init__() self.nets = nn.ModuleList(nets) self.extra_params = extra_params def forward(self, x): # Concat each along the channel dimension return torch.cat([net(x) for net in self.nets], dim=1, **self.extra_params) prior_cache = defaultdict(lambda: None) class PredictionModule(nn.Module): """ The (c) prediction module adapted from DSSD: https://arxiv.org/pdf/1701.06659.pdf Note that this is slightly different to the module in the paper because the Bottleneck block actually has a 3x3 convolution in the middle instead of a 1x1 convolution. Though, I really can't be arsed to implement it myself, and, who knows, this might be better. Args: - in_channels: The input feature size. - out_channels: The output feature size (must be a multiple of 4). - aspect_ratios: A list of lists of priorbox aspect ratios (one list per scale). - scales: A list of priorbox scales relative to this layer's convsize. For instance: If this layer has convouts of size 30x30 for an image of size 600x600, the 'default' (scale of 1) for this layer would produce bounding boxes with an area of 20x20px. If the scale is .5 on the other hand, this layer would consider bounding boxes with area 10x10px, etc. - parent: If parent is a PredictionModule, this module will use all the layers from parent instead of from this module. 
""" def __init__(self, in_channels, out_channels=1024, aspect_ratios=[[1]], scales=[1], parent=None, index=0): super().__init__() self.num_classes = cfg.num_classes self.mask_dim = cfg.mask_dim # Defined by Yolact self.num_priors = sum(len(x)*len(scales) for x in aspect_ratios) self.parent = [parent] # Don't include this in the state dict self.index = index self.num_heads = cfg.num_heads # Defined by Yolact if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb: self.mask_dim = self.mask_dim // self.num_heads if cfg.mask_proto_prototypes_as_features: in_channels += self.mask_dim if parent is None: if cfg.extra_head_net is None: out_channels = in_channels else: self.upfeature, out_channels = make_net(in_channels, cfg.extra_head_net) if cfg.use_prediction_module: self.block = Bottleneck(out_channels, out_channels // 4) self.conv = nn.Conv2d(out_channels, out_channels, kernel_size=1, bias=True) self.bn = nn.BatchNorm2d(out_channels) self.bbox_layer = nn.Conv2d(out_channels, self.num_priors * 4, **cfg.head_layer_params) self.conf_layer = nn.Conv2d(out_channels, self.num_priors * self.num_classes, **cfg.head_layer_params) self.mask_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, **cfg.head_layer_params) if cfg.use_mask_scoring: self.score_layer = nn.Conv2d(out_channels, self.num_priors, **cfg.head_layer_params) if cfg.use_instance_coeff: self.inst_layer = nn.Conv2d(out_channels, self.num_priors * cfg.num_instance_coeffs, **cfg.head_layer_params) # What is this ugly lambda doing in the middle of all this clean prediction module code? def make_extra(num_layers): if num_layers == 0: return lambda x: x else: # Looks more complicated than it is. This just creates an array of num_layers alternating conv-relu return nn.Sequential(*sum([[ nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), nn.ReLU(inplace=True) ] for _ in range(num_layers)], [])) self.bbox_extra, self.conf_extra, self.mask_extra = [make_extra(x) for x in cfg.extra_layers] if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_coeff_gate: self.gate_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, kernel_size=3, padding=1) self.aspect_ratios = aspect_ratios self.scales = scales self.priors = None self.last_conv_size = None self.last_img_size = None def forward(self, x): """ Args: - x: The convOut from a layer in the backbone network Size: [batch_size, in_channels, conv_h, conv_w]) Returns a tuple (bbox_coords, class_confs, mask_output, prior_boxes) with sizes - bbox_coords: [batch_size, conv_h*conv_w*num_priors, 4] - class_confs: [batch_size, conv_h*conv_w*num_priors, num_classes] - mask_output: [batch_size, conv_h*conv_w*num_priors, mask_dim] - prior_boxes: [conv_h*conv_w*num_priors, 4] """ # In case we want to use another module's layers src = self if self.parent[0] is None else self.parent[0] conv_h = x.size(2) conv_w = x.size(3) if cfg.extra_head_net is not None: x = src.upfeature(x) if cfg.use_prediction_module: # The two branches of PM design (c) a = src.block(x) b = src.conv(x) b = src.bn(b) b = F.relu(b) # TODO: Possibly switch this out for a product x = a + b bbox_x = src.bbox_extra(x) conf_x = src.conf_extra(x) mask_x = src.mask_extra(x) bbox = src.bbox_layer(bbox_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 4) conf = src.conf_layer(conf_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.num_classes) if cfg.eval_mask_branch: mask = src.mask_layer(mask_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim) else: mask = 
torch.zeros(x.size(0), bbox.size(1), self.mask_dim, device=bbox.device) if cfg.use_mask_scoring: score = src.score_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 1) if cfg.use_instance_coeff: inst = src.inst_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, cfg.num_instance_coeffs) # See box_utils.decode for an explanation of this if cfg.use_yolo_regressors: bbox[:, :, :2] = torch.sigmoid(bbox[:, :, :2]) - 0.5 bbox[:, :, 0] /= conv_w bbox[:, :, 1] /= conv_h if cfg.eval_mask_branch: if cfg.mask_type == mask_type.direct: mask = torch.sigmoid(mask) elif cfg.mask_type == mask_type.lincomb: mask = cfg.mask_proto_coeff_activation(mask) if cfg.mask_proto_coeff_gate: gate = src.gate_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim) mask = mask * torch.sigmoid(gate) if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb: mask = F.pad(mask, (self.index * self.mask_dim, (self.num_heads - self.index - 1) * self.mask_dim), mode='constant', value=0) priors = self.make_priors(conv_h, conv_w, x.device) preds = { 'loc': bbox, 'conf': conf, 'mask': mask, 'priors': priors } if cfg.use_mask_scoring: preds['score'] = score if cfg.use_instance_coeff: preds['inst'] = inst return preds def make_priors(self, conv_h, conv_w, device): """ Note that priors are [x,y,width,height] where (x,y) is the center of the box. """ global prior_cache size = (conv_h, conv_w) with timer.env('makepriors'): if self.last_img_size != (cfg._tmp_img_w, cfg._tmp_img_h): prior_data = [] # Iteration order is important (it has to sync up with the convout) for j, i in product(range(conv_h), range(conv_w)): # +0.5 because priors are in center-size notation x = (i + 0.5) / conv_w y = (j + 0.5) / conv_h for ars in self.aspect_ratios: for scale in self.scales: for ar in ars: if not cfg.backbone.preapply_sqrt: ar = sqrt(ar) if cfg.backbone.use_pixel_scales: w = scale * ar / cfg.max_size h = scale / ar / cfg.max_size else: w = scale * ar / conv_w h = scale / ar / conv_h # This is for backward compatability with a bug where I made everything square by accident if cfg.backbone.use_square_anchors: h = w prior_data += [x, y, w, h] self.priors = torch.Tensor(prior_data, device=device).view(-1, 4).detach() self.priors.requires_grad = False self.last_img_size = (cfg._tmp_img_w, cfg._tmp_img_h) self.last_conv_size = (conv_w, conv_h) prior_cache[size] = None elif self.priors.device != device: # This whole weird situation is so that DataParalell doesn't copy the priors each iteration if prior_cache[size] is None: prior_cache[size] = {} if device not in prior_cache[size]: prior_cache[size][device] = self.priors.to(device) self.priors = prior_cache[size][device] return self.priors class FPN(ScriptModuleWrapper): """ Implements a general version of the FPN introduced in https://arxiv.org/pdf/1612.03144.pdf Parameters (in cfg.fpn): - num_features (int): The number of output features in the fpn layers. - interpolation_mode (str): The mode to pass to F.interpolate. - num_downsample (int): The number of downsampled layers to add onto the selected layers. These extra layers are downsampled from the last selected layer. Args: - in_channels (list): For each conv layer you supply in the forward pass, how many features will it have? 
""" __constants__ = ['interpolation_mode', 'num_downsample', 'use_conv_downsample', 'relu_pred_layers', 'lat_layers', 'pred_layers', 'downsample_layers', 'relu_downsample_layers'] def __init__(self, in_channels): super().__init__() self.lat_layers = nn.ModuleList([ nn.Conv2d(x, cfg.fpn.num_features, kernel_size=1) for x in reversed(in_channels) ]) # This is here for backwards compatability padding = 1 if cfg.fpn.pad else 0 self.pred_layers = nn.ModuleList([ nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=padding) for _ in in_channels ]) if cfg.fpn.use_conv_downsample: self.downsample_layers = nn.ModuleList([ nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=1, stride=2) for _ in range(cfg.fpn.num_downsample) ]) self.interpolation_mode = cfg.fpn.interpolation_mode self.num_downsample = cfg.fpn.num_downsample self.use_conv_downsample = cfg.fpn.use_conv_downsample self.relu_downsample_layers = cfg.fpn.relu_downsample_layers self.relu_pred_layers = cfg.fpn.relu_pred_layers @script_method_wrapper def forward(self, convouts:List[torch.Tensor]): """ Args: - convouts (list): A list of convouts for the corresponding layers in in_channels. Returns: - A list of FPN convouts in the same order as x with extra downsample layers if requested. """ out = [] x = torch.zeros(1, device=convouts[0].device) for i in range(len(convouts)): out.append(x) # For backward compatability, the conv layers are stored in reverse but the input and output is # given in the correct order. Thus, use j=-i-1 for the input and output and i for the conv layers. j = len(convouts) for lat_layer in self.lat_layers: j -= 1 if j < len(convouts) - 1: _, _, h, w = convouts[j].size() x = F.interpolate(x, size=(h, w), mode=self.interpolation_mode, align_corners=False) x = x + lat_layer(convouts[j]) out[j] = x # This janky second loop is here because TorchScript. j = len(convouts) for pred_layer in self.pred_layers: j -= 1 out[j] = pred_layer(out[j]) if self.relu_pred_layers: F.relu(out[j], inplace=True) cur_idx = len(out) # In the original paper, this takes care of P6 if self.use_conv_downsample: for downsample_layer in self.downsample_layers: out.append(downsample_layer(out[-1])) else: for idx in range(self.num_downsample): # Note: this is an untested alternative to out.append(out[-1][:, :, ::2, ::2]). Thanks TorchScript. out.append(nn.functional.max_pool2d(out[-1], 1, stride=2)) if self.relu_downsample_layers: for idx in range(len(out) - cur_idx): out[idx] = F.relu(out[idx + cur_idx], inplace=False) return out class FastMaskIoUNet(ScriptModuleWrapper): def __init__(self): super().__init__() input_channels = 1 last_layer = [(cfg.num_classes-1, 1, {})] self.maskiou_net, _ = make_net(input_channels, cfg.maskiou_net + last_layer, include_last_relu=True) def forward(self, x): x = self.maskiou_net(x) maskiou_p = F.max_pool2d(x, kernel_size=x.size()[2:]).squeeze(-1).squeeze(-1) return maskiou_p class Yolact(nn.Module): """ ██╗ ██╗ ██████╗ ██╗ █████╗ ██████╗████████╗ ╚██╗ ██╔╝██╔═══██╗██║ ██╔══██╗██╔════╝╚══██╔══╝ ╚████╔╝ ██║ ██║██║ ███████║██║ ██║ ╚██╔╝ ██║ ██║██║ ██╔══██║██║ ██║ ██║ ╚██████╔╝███████╗██║ ██║╚██████╗ ██║ ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ You can set the arguments by changing them in the backbone config object in config.py. Parameters (in cfg.backbone): - selected_layers: The indices of the conv layers to use for prediction. 
- pred_scales: A list with len(selected_layers) containing tuples of scales (see PredictionModule) - pred_aspect_ratios: A list of lists of aspect ratios with len(selected_layers) (see PredictionModule) """ def __init__(self, only_last_layer=False): super().__init__() self.only_last_layer = only_last_layer self.backbone = construct_backbone(cfg.backbone) if cfg.freeze_bn: self.freeze_bn() # Compute mask_dim here and add it back to the config. Make sure Yolact's constructor is called early! if cfg.mask_type == mask_type.direct: cfg.mask_dim = cfg.mask_size**2 elif cfg.mask_type == mask_type.lincomb: if cfg.mask_proto_use_grid: self.grid = torch.Tensor(np.load(cfg.mask_proto_grid_file)) self.num_grids = self.grid.size(0) else: self.num_grids = 0 self.proto_src = cfg.mask_proto_src if self.proto_src is None: in_channels = 3 elif cfg.fpn is not None: in_channels = cfg.fpn.num_features else: in_channels = self.backbone.channels[self.proto_src] in_channels += self.num_grids # The include_last_relu=false here is because we might want to change it to another function self.proto_net, cfg.mask_dim = make_net(in_channels, cfg.mask_proto_net, include_last_relu=False) if cfg.mask_proto_bias: cfg.mask_dim += 1 self.selected_layers = cfg.backbone.selected_layers src_channels = self.backbone.channels if cfg.use_maskiou: self.maskiou_net = FastMaskIoUNet() if cfg.fpn is not None: # Some hacky rewiring to accomodate the FPN self.fpn = FPN([src_channels[i] for i in self.selected_layers]) self.selected_layers = list(range(len(self.selected_layers) + cfg.fpn.num_downsample)) src_channels = [cfg.fpn.num_features] * len(self.selected_layers) self.prediction_layers = nn.ModuleList() cfg.num_heads = len(self.selected_layers) for idx, layer_idx in enumerate(self.selected_layers): # If we're sharing prediction module weights, have every module's parent be the first one parent = None if cfg.share_prediction_module and idx > 0: parent = self.prediction_layers[0] pred = PredictionModule(src_channels[layer_idx], src_channels[layer_idx], aspect_ratios = cfg.backbone.pred_aspect_ratios[idx], scales = cfg.backbone.pred_scales[idx], parent = parent, index = idx) self.prediction_layers.append(pred) # Extra parameters for the extra losses if cfg.use_class_existence_loss: # This comes from the smallest layer selected # Also note that cfg.num_classes includes background self.class_existence_fc = nn.Linear(src_channels[-1], cfg.num_classes - 1) if cfg.use_semantic_segmentation_loss: self.semantic_seg_conv = nn.Conv2d(src_channels[0], cfg.num_classes-1, kernel_size=1) # For use in evaluation self.detect = Detect(cfg.num_classes, bkg_label=0, top_k=cfg.nms_top_k, conf_thresh=cfg.nms_conf_thresh, nms_thresh=cfg.nms_thresh) def save_weights(self, path): """ Saves the model's weights using compression because the file sizes were getting too big. """ torch.save(self.state_dict(), path) def load_weights(self, path): """ Loads weights from a compressed save file. """ state_dict = torch.load(path) # For backward compatability, remove these (the new variable is called layers) for key in list(state_dict.keys()): if key.startswith('backbone.layer') and not key.startswith('backbone.layers'): del state_dict[key] # Also for backward compatibility with v1.0 weights, do this check if key.startswith('fpn.downsample_layers.'): if cfg.fpn is not None and int(key.split('.')[2]) >= cfg.fpn.num_downsample: del state_dict[key] # Uncomment this in normal conditions # self.load_state_dict(state_dict) # Added this for fine-tuning. 
Comment this in normal conditions. try: self.load_state_dict(state_dict) except RuntimeError as e: print('Ignoring "' + str(e) + '"') def init_weights(self, backbone_path): """ Initialize weights for training. """ # Initialize the backbone with the pretrained weights. self.backbone.init_backbone(backbone_path) conv_constants = getattr(nn.Conv2d(1, 1, 1), '__constants__') # Quick lambda to test if one list contains the other def all_in(x, y): for _x in x: if _x not in y: return False return True # Initialize the rest of the conv layers with xavier for name, module in self.named_modules(): # See issue #127 for why we need such a complicated condition if the module is a WeakScriptModuleProxy # Broke in 1.3 (see issue #175), WeakScriptModuleProxy was turned into just ScriptModule. # Broke in 1.4 (see issue #292), where RecursiveScriptModule is the new star of the show. # Note that this might break with future pytorch updates, so let me know if it does is_script_conv = False if 'Script' in type(module).__name__: # 1.4 workaround: now there's an original_name member so just use that if hasattr(module, 'original_name'): is_script_conv = 'Conv' in module.original_name # 1.3 workaround: check if this has the same constants as a conv module else: is_script_conv = ( all_in(module.__dict__['_constants_set'], conv_constants) and all_in(conv_constants, module.__dict__['_constants_set'])) is_conv_layer = isinstance(module, nn.Conv2d) or is_script_conv if is_conv_layer and module not in self.backbone.backbone_modules: nn.init.xavier_uniform_(module.weight.data) if module.bias is not None: if cfg.use_focal_loss and 'conf_layer' in name: if not cfg.use_sigmoid_focal_loss: # Initialize the last layer as in the focal loss paper. # Because we use softmax and not sigmoid, I had to derive an alternate expression # on a notecard. Define pi to be the probability of outputting a foreground detection. # Then let z = sum(exp(x)) - exp(x_0). Finally let c be the number of foreground classes. # Chugging through the math, this gives us # x_0 = log(z * (1 - pi) / pi) where 0 is the background class # x_i = log(z / c) for all i > 0 # For simplicity (and because we have a degree of freedom here), set z = 1. 
Then we have # x_0 = log((1 - pi) / pi) note: don't split up the log for numerical stability # x_i = -log(c) for all i > 0 module.bias.data[0] = np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi) module.bias.data[1:] = -np.log(module.bias.size(0) - 1) else: module.bias.data[0] = -np.log(cfg.focal_loss_init_pi / (1 - cfg.focal_loss_init_pi)) module.bias.data[1:] = -np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi) else: module.bias.data.zero_() def train(self, mode=True): super().train(mode) if cfg.freeze_bn: self.freeze_bn() def freeze_bn(self, enable=False): """ Adapted from https://discuss.pytorch.org/t/how-to-train-with-frozen-batchnorm/12106/8 """ for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.train() if enable else module.eval() module.weight.requires_grad = enable module.bias.requires_grad = enable def forward(self, x): """ The input should be of size [batch_size, 3, img_h, img_w] """ _, _, img_h, img_w = x.size() cfg._tmp_img_h = img_h cfg._tmp_img_w = img_w with timer.env('backbone'): outs = self.backbone(x) if cfg.fpn is not None: with timer.env('fpn'): # Use backbone.selected_layers because we overwrote self.selected_layers outs = [outs[i] for i in cfg.backbone.selected_layers] outs = self.fpn(outs) proto_out = None if cfg.mask_type == mask_type.lincomb and cfg.eval_mask_branch: with timer.env('proto'): proto_x = x if self.proto_src is None else outs[self.proto_src] if self.num_grids > 0: grids = self.grid.repeat(proto_x.size(0), 1, 1, 1) proto_x = torch.cat([proto_x, grids], dim=1) proto_out = self.proto_net(proto_x) proto_out = cfg.mask_proto_prototype_activation(proto_out) if cfg.mask_proto_prototypes_as_features: # Clone here because we don't want to permute this, though idk if contiguous makes this unnecessary proto_downsampled = proto_out.clone() if cfg.mask_proto_prototypes_as_features_no_grad: proto_downsampled = proto_out.detach() # Move the features last so the multiplication is easy proto_out = proto_out.permute(0, 2, 3, 1).contiguous() if cfg.mask_proto_bias: bias_shape = [x for x in proto_out.size()] bias_shape[-1] = 1 proto_out = torch.cat([proto_out, torch.ones(*bias_shape)], -1) with timer.env('pred_heads'): pred_outs = { 'loc': [], 'conf': [], 'mask': [], 'priors': [] } if cfg.use_mask_scoring: pred_outs['score'] = [] if cfg.use_instance_coeff: pred_outs['inst'] = [] for idx, pred_layer in zip(self.selected_layers, self.prediction_layers): pred_x = outs[idx] if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_prototypes_as_features: # Scale the prototypes down to the current prediction layer's size and add it as inputs proto_downsampled = F.interpolate(proto_downsampled, size=outs[idx].size()[2:], mode='bilinear', align_corners=False) pred_x = torch.cat([pred_x, proto_downsampled], dim=1) # A hack for the way dataparallel works if cfg.share_prediction_module and pred_layer is not self.prediction_layers[0]: pred_layer.parent = [self.prediction_layers[0]] if self.only_last_layer: p = pred_layer(pred_x.detach()) else: p = pred_layer(pred_x) for k, v in p.items(): pred_outs[k].append(v) for k, v in pred_outs.items(): pred_outs[k] = torch.cat(v, -2) if proto_out is not None: pred_outs['proto'] = proto_out if self.training: # For the extra loss functions if cfg.use_class_existence_loss: pred_outs['classes'] = self.class_existence_fc(outs[-1].mean(dim=(2, 3))) if cfg.use_semantic_segmentation_loss: pred_outs['segm'] = self.semantic_seg_conv(outs[0]) return pred_outs else: if cfg.use_mask_scoring: 
pred_outs['score'] = torch.sigmoid(pred_outs['score']) if cfg.use_focal_loss: if cfg.use_sigmoid_focal_loss: # Note: even though conf[0] exists, this mode doesn't train it so don't use it pred_outs['conf'] = torch.sigmoid(pred_outs['conf']) if cfg.use_mask_scoring: pred_outs['conf'] *= pred_outs['score'] elif cfg.use_objectness_score: # See focal_loss_sigmoid in multibox_loss.py for details objectness = torch.sigmoid(pred_outs['conf'][:, :, 0]) pred_outs['conf'][:, :, 1:] = objectness[:, :, None] * F.softmax(pred_outs['conf'][:, :, 1:], -1) pred_outs['conf'][:, :, 0 ] = 1 - objectness else: pred_outs['conf'] = F.softmax(pred_outs['conf'], -1) else: if cfg.use_objectness_score: objectness = torch.sigmoid(pred_outs['conf'][:, :, 0]) pred_outs['conf'][:, :, 1:] = (objectness > 0.10)[..., None] \ * F.softmax(pred_outs['conf'][:, :, 1:], dim=-1) else: pred_outs['conf'] = F.softmax(pred_outs['conf'], -1) return self.detect(pred_outs, self) # Some testing code if __name__ == '__main__': from utils.functions import init_console init_console() # Use the first argument to set the config if you want import sys if len(sys.argv) > 1: from data.config import set_cfg set_cfg(sys.argv[1]) net = Yolact() net.train() net.init_weights(backbone_path='weights/' + cfg.backbone.path) # GPU net = net.cuda() torch.set_default_tensor_type('torch.cuda.FloatTensor') x = torch.zeros((1, 3, cfg.max_size, cfg.max_size)) y = net(x) for p in net.prediction_layers: print(p.last_conv_size) print() for k, a in y.items(): print(k + ': ', a.size(), torch.sum(a)) exit() net(x) # timer.disable('pass2') avg = MovingAverage() try: while True: timer.reset() with timer.env('everything else'): net(x) avg.add(timer.total_time()) print('\033[2J') # Moves console cursor to 0,0 timer.print_stats() print('Avg fps: %.2f\tAvg ms: %.2f ' % (1/avg.get_avg(), avg.get_avg()*1000)) except KeyboardInterrupt: pass
42.945578
137
0.569555
import torch, torchvision import torch.nn as nn import torch.nn.functional as F from torchvision.models.resnet import Bottleneck import numpy as np from itertools import product from math import sqrt from typing import List from collections import defaultdict from data.config import cfg, mask_type from layers import Detect from layers.interpolate import InterpolateModule from backbone import construct_backbone import torch.backends.cudnn as cudnn from utils import timer from utils.functions import MovingAverage, make_net torch.cuda.current_device() use_jit = torch.cuda.device_count() <= 1 if not use_jit: print('Multiple GPUs detected! Turning off JIT.') ScriptModuleWrapper = torch.jit.ScriptModule if use_jit else nn.Module script_method_wrapper = torch.jit.script_method if use_jit else lambda fn, _rcn=None: fn class Concat(nn.Module): def __init__(self, nets, extra_params): super().__init__() self.nets = nn.ModuleList(nets) self.extra_params = extra_params def forward(self, x): # Concat each along the channel dimension return torch.cat([net(x) for net in self.nets], dim=1, **self.extra_params) prior_cache = defaultdict(lambda: None) class PredictionModule(nn.Module): def __init__(self, in_channels, out_channels=1024, aspect_ratios=[[1]], scales=[1], parent=None, index=0): super().__init__() self.num_classes = cfg.num_classes self.mask_dim = cfg.mask_dim # Defined by Yolact self.num_priors = sum(len(x)*len(scales) for x in aspect_ratios) self.parent = [parent] # Don't include this in the state dict self.index = index self.num_heads = cfg.num_heads if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb: self.mask_dim = self.mask_dim // self.num_heads if cfg.mask_proto_prototypes_as_features: in_channels += self.mask_dim if parent is None: if cfg.extra_head_net is None: out_channels = in_channels else: self.upfeature, out_channels = make_net(in_channels, cfg.extra_head_net) if cfg.use_prediction_module: self.block = Bottleneck(out_channels, out_channels // 4) self.conv = nn.Conv2d(out_channels, out_channels, kernel_size=1, bias=True) self.bn = nn.BatchNorm2d(out_channels) self.bbox_layer = nn.Conv2d(out_channels, self.num_priors * 4, **cfg.head_layer_params) self.conf_layer = nn.Conv2d(out_channels, self.num_priors * self.num_classes, **cfg.head_layer_params) self.mask_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, **cfg.head_layer_params) if cfg.use_mask_scoring: self.score_layer = nn.Conv2d(out_channels, self.num_priors, **cfg.head_layer_params) if cfg.use_instance_coeff: self.inst_layer = nn.Conv2d(out_channels, self.num_priors * cfg.num_instance_coeffs, **cfg.head_layer_params) def make_extra(num_layers): if num_layers == 0: return lambda x: x else: return nn.Sequential(*sum([[ nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), nn.ReLU(inplace=True) ] for _ in range(num_layers)], [])) self.bbox_extra, self.conf_extra, self.mask_extra = [make_extra(x) for x in cfg.extra_layers] if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_coeff_gate: self.gate_layer = nn.Conv2d(out_channels, self.num_priors * self.mask_dim, kernel_size=3, padding=1) self.aspect_ratios = aspect_ratios self.scales = scales self.priors = None self.last_conv_size = None self.last_img_size = None def forward(self, x): src = self if self.parent[0] is None else self.parent[0] conv_h = x.size(2) conv_w = x.size(3) if cfg.extra_head_net is not None: x = src.upfeature(x) if cfg.use_prediction_module: # The two branches of PM design (c) a = src.block(x) b = 
src.conv(x) b = src.bn(b) b = F.relu(b) # TODO: Possibly switch this out for a product x = a + b bbox_x = src.bbox_extra(x) conf_x = src.conf_extra(x) mask_x = src.mask_extra(x) bbox = src.bbox_layer(bbox_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 4) conf = src.conf_layer(conf_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.num_classes) if cfg.eval_mask_branch: mask = src.mask_layer(mask_x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim) else: mask = torch.zeros(x.size(0), bbox.size(1), self.mask_dim, device=bbox.device) if cfg.use_mask_scoring: score = src.score_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, 1) if cfg.use_instance_coeff: inst = src.inst_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, cfg.num_instance_coeffs) # See box_utils.decode for an explanation of this if cfg.use_yolo_regressors: bbox[:, :, :2] = torch.sigmoid(bbox[:, :, :2]) - 0.5 bbox[:, :, 0] /= conv_w bbox[:, :, 1] /= conv_h if cfg.eval_mask_branch: if cfg.mask_type == mask_type.direct: mask = torch.sigmoid(mask) elif cfg.mask_type == mask_type.lincomb: mask = cfg.mask_proto_coeff_activation(mask) if cfg.mask_proto_coeff_gate: gate = src.gate_layer(x).permute(0, 2, 3, 1).contiguous().view(x.size(0), -1, self.mask_dim) mask = mask * torch.sigmoid(gate) if cfg.mask_proto_split_prototypes_by_head and cfg.mask_type == mask_type.lincomb: mask = F.pad(mask, (self.index * self.mask_dim, (self.num_heads - self.index - 1) * self.mask_dim), mode='constant', value=0) priors = self.make_priors(conv_h, conv_w, x.device) preds = { 'loc': bbox, 'conf': conf, 'mask': mask, 'priors': priors } if cfg.use_mask_scoring: preds['score'] = score if cfg.use_instance_coeff: preds['inst'] = inst return preds def make_priors(self, conv_h, conv_w, device): global prior_cache size = (conv_h, conv_w) with timer.env('makepriors'): if self.last_img_size != (cfg._tmp_img_w, cfg._tmp_img_h): prior_data = [] # Iteration order is important (it has to sync up with the convout) for j, i in product(range(conv_h), range(conv_w)): # +0.5 because priors are in center-size notation x = (i + 0.5) / conv_w y = (j + 0.5) / conv_h for ars in self.aspect_ratios: for scale in self.scales: for ar in ars: if not cfg.backbone.preapply_sqrt: ar = sqrt(ar) if cfg.backbone.use_pixel_scales: w = scale * ar / cfg.max_size h = scale / ar / cfg.max_size else: w = scale * ar / conv_w h = scale / ar / conv_h # This is for backward compatability with a bug where I made everything square by accident if cfg.backbone.use_square_anchors: h = w prior_data += [x, y, w, h] self.priors = torch.Tensor(prior_data, device=device).view(-1, 4).detach() self.priors.requires_grad = False self.last_img_size = (cfg._tmp_img_w, cfg._tmp_img_h) self.last_conv_size = (conv_w, conv_h) prior_cache[size] = None elif self.priors.device != device: # This whole weird situation is so that DataParalell doesn't copy the priors each iteration if prior_cache[size] is None: prior_cache[size] = {} if device not in prior_cache[size]: prior_cache[size][device] = self.priors.to(device) self.priors = prior_cache[size][device] return self.priors class FPN(ScriptModuleWrapper): __constants__ = ['interpolation_mode', 'num_downsample', 'use_conv_downsample', 'relu_pred_layers', 'lat_layers', 'pred_layers', 'downsample_layers', 'relu_downsample_layers'] def __init__(self, in_channels): super().__init__() self.lat_layers = nn.ModuleList([ nn.Conv2d(x, cfg.fpn.num_features, kernel_size=1) for x in reversed(in_channels) ]) padding = 1 if 
cfg.fpn.pad else 0 self.pred_layers = nn.ModuleList([ nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=padding) for _ in in_channels ]) if cfg.fpn.use_conv_downsample: self.downsample_layers = nn.ModuleList([ nn.Conv2d(cfg.fpn.num_features, cfg.fpn.num_features, kernel_size=3, padding=1, stride=2) for _ in range(cfg.fpn.num_downsample) ]) self.interpolation_mode = cfg.fpn.interpolation_mode self.num_downsample = cfg.fpn.num_downsample self.use_conv_downsample = cfg.fpn.use_conv_downsample self.relu_downsample_layers = cfg.fpn.relu_downsample_layers self.relu_pred_layers = cfg.fpn.relu_pred_layers @script_method_wrapper def forward(self, convouts:List[torch.Tensor]): out = [] x = torch.zeros(1, device=convouts[0].device) for i in range(len(convouts)): out.append(x) j = len(convouts) for lat_layer in self.lat_layers: j -= 1 if j < len(convouts) - 1: _, _, h, w = convouts[j].size() x = F.interpolate(x, size=(h, w), mode=self.interpolation_mode, align_corners=False) x = x + lat_layer(convouts[j]) out[j] = x j = len(convouts) for pred_layer in self.pred_layers: j -= 1 out[j] = pred_layer(out[j]) if self.relu_pred_layers: F.relu(out[j], inplace=True) cur_idx = len(out) if self.use_conv_downsample: for downsample_layer in self.downsample_layers: out.append(downsample_layer(out[-1])) else: for idx in range(self.num_downsample): out.append(nn.functional.max_pool2d(out[-1], 1, stride=2)) if self.relu_downsample_layers: for idx in range(len(out) - cur_idx): out[idx] = F.relu(out[idx + cur_idx], inplace=False) return out class FastMaskIoUNet(ScriptModuleWrapper): def __init__(self): super().__init__() input_channels = 1 last_layer = [(cfg.num_classes-1, 1, {})] self.maskiou_net, _ = make_net(input_channels, cfg.maskiou_net + last_layer, include_last_relu=True) def forward(self, x): x = self.maskiou_net(x) maskiou_p = F.max_pool2d(x, kernel_size=x.size()[2:]).squeeze(-1).squeeze(-1) return maskiou_p class Yolact(nn.Module): def __init__(self, only_last_layer=False): super().__init__() self.only_last_layer = only_last_layer self.backbone = construct_backbone(cfg.backbone) if cfg.freeze_bn: self.freeze_bn() if cfg.mask_type == mask_type.direct: cfg.mask_dim = cfg.mask_size**2 elif cfg.mask_type == mask_type.lincomb: if cfg.mask_proto_use_grid: self.grid = torch.Tensor(np.load(cfg.mask_proto_grid_file)) self.num_grids = self.grid.size(0) else: self.num_grids = 0 self.proto_src = cfg.mask_proto_src if self.proto_src is None: in_channels = 3 elif cfg.fpn is not None: in_channels = cfg.fpn.num_features else: in_channels = self.backbone.channels[self.proto_src] in_channels += self.num_grids # The include_last_relu=false here is because we might want to change it to another function self.proto_net, cfg.mask_dim = make_net(in_channels, cfg.mask_proto_net, include_last_relu=False) if cfg.mask_proto_bias: cfg.mask_dim += 1 self.selected_layers = cfg.backbone.selected_layers src_channels = self.backbone.channels if cfg.use_maskiou: self.maskiou_net = FastMaskIoUNet() if cfg.fpn is not None: # Some hacky rewiring to accomodate the FPN self.fpn = FPN([src_channels[i] for i in self.selected_layers]) self.selected_layers = list(range(len(self.selected_layers) + cfg.fpn.num_downsample)) src_channels = [cfg.fpn.num_features] * len(self.selected_layers) self.prediction_layers = nn.ModuleList() cfg.num_heads = len(self.selected_layers) for idx, layer_idx in enumerate(self.selected_layers): # If we're sharing prediction module weights, have every module's parent be the first one parent = None if 
cfg.share_prediction_module and idx > 0: parent = self.prediction_layers[0] pred = PredictionModule(src_channels[layer_idx], src_channels[layer_idx], aspect_ratios = cfg.backbone.pred_aspect_ratios[idx], scales = cfg.backbone.pred_scales[idx], parent = parent, index = idx) self.prediction_layers.append(pred) # Extra parameters for the extra losses if cfg.use_class_existence_loss: # This comes from the smallest layer selected # Also note that cfg.num_classes includes background self.class_existence_fc = nn.Linear(src_channels[-1], cfg.num_classes - 1) if cfg.use_semantic_segmentation_loss: self.semantic_seg_conv = nn.Conv2d(src_channels[0], cfg.num_classes-1, kernel_size=1) # For use in evaluation self.detect = Detect(cfg.num_classes, bkg_label=0, top_k=cfg.nms_top_k, conf_thresh=cfg.nms_conf_thresh, nms_thresh=cfg.nms_thresh) def save_weights(self, path): torch.save(self.state_dict(), path) def load_weights(self, path): state_dict = torch.load(path) # For backward compatability, remove these (the new variable is called layers) for key in list(state_dict.keys()): if key.startswith('backbone.layer') and not key.startswith('backbone.layers'): del state_dict[key] # Also for backward compatibility with v1.0 weights, do this check if key.startswith('fpn.downsample_layers.'): if cfg.fpn is not None and int(key.split('.')[2]) >= cfg.fpn.num_downsample: del state_dict[key] # Uncomment this in normal conditions # self.load_state_dict(state_dict) # Added this for fine-tuning. Comment this in normal conditions. try: self.load_state_dict(state_dict) except RuntimeError as e: print('Ignoring "' + str(e) + '"') def init_weights(self, backbone_path): # Initialize the backbone with the pretrained weights. self.backbone.init_backbone(backbone_path) conv_constants = getattr(nn.Conv2d(1, 1, 1), '__constants__') # Quick lambda to test if one list contains the other def all_in(x, y): for _x in x: if _x not in y: return False return True # Initialize the rest of the conv layers with xavier for name, module in self.named_modules(): # See issue #127 for why we need such a complicated condition if the module is a WeakScriptModuleProxy # Broke in 1.3 (see issue #175), WeakScriptModuleProxy was turned into just ScriptModule. # Broke in 1.4 (see issue #292), where RecursiveScriptModule is the new star of the show. 
# Note that this might break with future pytorch updates, so let me know if it does is_script_conv = False if 'Script' in type(module).__name__: # 1.4 workaround: now there's an original_name member so just use that if hasattr(module, 'original_name'): is_script_conv = 'Conv' in module.original_name else: is_script_conv = ( all_in(module.__dict__['_constants_set'], conv_constants) and all_in(conv_constants, module.__dict__['_constants_set'])) is_conv_layer = isinstance(module, nn.Conv2d) or is_script_conv if is_conv_layer and module not in self.backbone.backbone_modules: nn.init.xavier_uniform_(module.weight.data) if module.bias is not None: if cfg.use_focal_loss and 'conf_layer' in name: if not cfg.use_sigmoid_focal_loss: # x_i = -log(c) for all i > 0 module.bias.data[0] = np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi) module.bias.data[1:] = -np.log(module.bias.size(0) - 1) else: module.bias.data[0] = -np.log(cfg.focal_loss_init_pi / (1 - cfg.focal_loss_init_pi)) module.bias.data[1:] = -np.log((1 - cfg.focal_loss_init_pi) / cfg.focal_loss_init_pi) else: module.bias.data.zero_() def train(self, mode=True): super().train(mode) if cfg.freeze_bn: self.freeze_bn() def freeze_bn(self, enable=False): for module in self.modules(): if isinstance(module, nn.BatchNorm2d): module.train() if enable else module.eval() module.weight.requires_grad = enable module.bias.requires_grad = enable def forward(self, x): _, _, img_h, img_w = x.size() cfg._tmp_img_h = img_h cfg._tmp_img_w = img_w with timer.env('backbone'): outs = self.backbone(x) if cfg.fpn is not None: with timer.env('fpn'): # Use backbone.selected_layers because we overwrote self.selected_layers outs = [outs[i] for i in cfg.backbone.selected_layers] outs = self.fpn(outs) proto_out = None if cfg.mask_type == mask_type.lincomb and cfg.eval_mask_branch: with timer.env('proto'): proto_x = x if self.proto_src is None else outs[self.proto_src] if self.num_grids > 0: grids = self.grid.repeat(proto_x.size(0), 1, 1, 1) proto_x = torch.cat([proto_x, grids], dim=1) proto_out = self.proto_net(proto_x) proto_out = cfg.mask_proto_prototype_activation(proto_out) if cfg.mask_proto_prototypes_as_features: # Clone here because we don't want to permute this, though idk if contiguous makes this unnecessary proto_downsampled = proto_out.clone() if cfg.mask_proto_prototypes_as_features_no_grad: proto_downsampled = proto_out.detach() proto_out = proto_out.permute(0, 2, 3, 1).contiguous() if cfg.mask_proto_bias: bias_shape = [x for x in proto_out.size()] bias_shape[-1] = 1 proto_out = torch.cat([proto_out, torch.ones(*bias_shape)], -1) with timer.env('pred_heads'): pred_outs = { 'loc': [], 'conf': [], 'mask': [], 'priors': [] } if cfg.use_mask_scoring: pred_outs['score'] = [] if cfg.use_instance_coeff: pred_outs['inst'] = [] for idx, pred_layer in zip(self.selected_layers, self.prediction_layers): pred_x = outs[idx] if cfg.mask_type == mask_type.lincomb and cfg.mask_proto_prototypes_as_features: proto_downsampled = F.interpolate(proto_downsampled, size=outs[idx].size()[2:], mode='bilinear', align_corners=False) pred_x = torch.cat([pred_x, proto_downsampled], dim=1) # A hack for the way dataparallel works if cfg.share_prediction_module and pred_layer is not self.prediction_layers[0]: pred_layer.parent = [self.prediction_layers[0]] if self.only_last_layer: p = pred_layer(pred_x.detach()) else: p = pred_layer(pred_x) for k, v in p.items(): pred_outs[k].append(v) for k, v in pred_outs.items(): pred_outs[k] = torch.cat(v, -2) if proto_out is not None: 
pred_outs['proto'] = proto_out if self.training: # For the extra loss functions if cfg.use_class_existence_loss: pred_outs['classes'] = self.class_existence_fc(outs[-1].mean(dim=(2, 3))) if cfg.use_semantic_segmentation_loss: pred_outs['segm'] = self.semantic_seg_conv(outs[0]) return pred_outs else: if cfg.use_mask_scoring: pred_outs['score'] = torch.sigmoid(pred_outs['score']) if cfg.use_focal_loss: if cfg.use_sigmoid_focal_loss: # Note: even though conf[0] exists, this mode doesn't train it so don't use it pred_outs['conf'] = torch.sigmoid(pred_outs['conf']) if cfg.use_mask_scoring: pred_outs['conf'] *= pred_outs['score'] elif cfg.use_objectness_score: # See focal_loss_sigmoid in multibox_loss.py for details objectness = torch.sigmoid(pred_outs['conf'][:, :, 0]) pred_outs['conf'][:, :, 1:] = objectness[:, :, None] * F.softmax(pred_outs['conf'][:, :, 1:], -1) pred_outs['conf'][:, :, 0 ] = 1 - objectness else: pred_outs['conf'] = F.softmax(pred_outs['conf'], -1) else: if cfg.use_objectness_score: objectness = torch.sigmoid(pred_outs['conf'][:, :, 0]) pred_outs['conf'][:, :, 1:] = (objectness > 0.10)[..., None] \ * F.softmax(pred_outs['conf'][:, :, 1:], dim=-1) else: pred_outs['conf'] = F.softmax(pred_outs['conf'], -1) return self.detect(pred_outs, self) # Some testing code if __name__ == '__main__': from utils.functions import init_console init_console() # Use the first argument to set the config if you want import sys if len(sys.argv) > 1: from data.config import set_cfg set_cfg(sys.argv[1]) net = Yolact() net.train() net.init_weights(backbone_path='weights/' + cfg.backbone.path) # GPU net = net.cuda() torch.set_default_tensor_type('torch.cuda.FloatTensor') x = torch.zeros((1, 3, cfg.max_size, cfg.max_size)) y = net(x) for p in net.prediction_layers: print(p.last_conv_size) print() for k, a in y.items(): print(k + ': ', a.size(), torch.sum(a)) exit() net(x) # timer.disable('pass2') avg = MovingAverage() try: while True: timer.reset() with timer.env('everything else'): net(x) avg.add(timer.total_time()) print('\033[2J') # Moves console cursor to 0,0 timer.print_stats() print('Avg fps: %.2f\tAvg ms: %.2f ' % (1/avg.get_avg(), avg.get_avg()*1000)) except KeyboardInterrupt: pass
true
true
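The `yolact.py` record above generates its anchor boxes ("priors") in center-size notation inside `PredictionModule.make_priors`. Below is a simplified, framework-free sketch of that loop for one feature map; it follows the `use_pixel_scales` branch with `preapply_sqrt=False` and flattens the nested aspect-ratio lists, so it is an illustration of the idea rather than the repository's exact routine, and the numbers in the usage line are illustrative, not taken from the record's config.

from itertools import product
from math import sqrt

def make_priors(conv_h, conv_w, scales, aspect_ratios, max_size):
    """Return [x, y, w, h] priors with centers and sizes normalized to [0, 1]."""
    priors = []
    for j, i in product(range(conv_h), range(conv_w)):
        x = (i + 0.5) / conv_w          # +0.5 because priors sit at cell centers
        y = (j + 0.5) / conv_h
        for ar in aspect_ratios:
            for scale in scales:
                r = sqrt(ar)            # aspect ratio applied as sqrt(ar) : 1/sqrt(ar)
                w = scale * r / max_size
                h = scale / r / max_size
                priors.append([x, y, w, h])
    return priors

boxes = make_priors(69, 69, scales=[24], aspect_ratios=[1, 0.5, 2], max_size=550)
print(len(boxes))   # 69 * 69 * 3 = 14283 priors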
f70124509333b85e75d5c224c99a12dd0db65945
5,013
py
Python
jupyterlab_primehub/jupyterlab_primehub/handlers.py
InfuseAI/primehub-job
7a68ac9ff8452c4faa72adf3b73a70270aa5ac65
[ "Apache-2.0" ]
null
null
null
jupyterlab_primehub/jupyterlab_primehub/handlers.py
InfuseAI/primehub-job
7a68ac9ff8452c4faa72adf3b73a70270aa5ac65
[ "Apache-2.0" ]
3
2020-10-26T08:21:02.000Z
2021-07-14T06:06:58.000Z
jupyterlab_primehub/jupyterlab_primehub/handlers.py
InfuseAI/primehub-job
7a68ac9ff8452c4faa72adf3b73a70270aa5ac65
[ "Apache-2.0" ]
null
null
null
import json from jupyterlab.labapp import LabApp from notebook.base.handlers import APIHandler from notebook.utils import url_path_join import tornado from .api import group_info, submit_job, get_env, check_function_set from .utils import get_group_volume_path import os.path from shutil import copyfile from datetime import datetime import importlib.util import sys ENV_API_ENDPOINT = 'JUPYTERLAB_DEV_API_ENDPOINT' NAMESPACE = "jupyterlab-primehub" api_endpoint = 'http://primehub-graphql/api/graphql' NOTEBOOK_DIR = None class CheckFunctionSetHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) function_set = check_function_set(api_endpoint, api_token) self.log.info(function_set) self.finish(json.dumps(function_set)) class ResourceHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) group_id = os.environ.get('GROUP_ID') self.log.info('group_info with group_id: {}'.format(group_id)) self.finish(json.dumps(group_info(api_endpoint, api_token, group_id))) class SubmitJobHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) name = params.get('name', 'notebook_job') group_id = os.environ.get('GROUP_ID') instance_type = params.get('instance_type', None) image = params.get('image', os.environ.get('IMAGE_NAME')) path = params.get('path', None) notebook_parameters = params.get('notebook_parameters', '') self.log.info('group_info with group_id: {}'.format(group_id)) fullpath = os.path.join(NOTEBOOK_DIR, path) self.log.info("relative path: " + path) self.log.info("notebook path: " + fullpath) # copy the file group_name = params.get('group_name', os.environ.get('GROUP_NAME')) time_string = datetime.now().strftime("%Y%m%d%H%M%S%f") nb_file_name = path.split('/').pop() nb_directory_path = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, '')) hidden_nb_file_name = '.' 
+ nb_file_name.replace('.ipynb', '') + '-' + time_string + '.ipynb' hidden_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name) output_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name[1:].replace('.ipynb', '-output.ipynb')) copyfile(fullpath, hidden_nb_fullpath) papermill_parameters = '' try: for parameter in notebook_parameters.replace(' ', '').split(';'): if '=' in parameter: kv = parameter.split('=') papermill_parameters = papermill_parameters + ' -p {} {}'.format(kv[0], kv[1]) except Exception as e: self.finish(json.dumps({ 'status': 'failed', 'error': 'failed to parse notebook parameters', 'message': str(e) })) return command_str = 'cd {} && papermill {} {}{} && rm {}'.format(nb_directory_path, hidden_nb_fullpath, output_nb_fullpath, papermill_parameters, hidden_nb_fullpath) self.finish(json.dumps(submit_job(api_endpoint, api_token, name, group_id, instance_type, image, command_str))) class EnvironmentHandler(APIHandler): @tornado.web.authenticated def post(self): self.finish(json.dumps(get_env())) def url_pattern(web_app, endpoint, *pieces): base_url = web_app.settings["base_url"] return url_path_join(base_url, NAMESPACE, endpoint, *pieces) def setup_handlers(lab_app: LabApp): setup_globals(lab_app) web_app, logger = lab_app.web_app, lab_app.log apply_api_endpoint_override(logger) host_pattern = ".*$" handlers = [(url_pattern(web_app, 'check-function'), CheckFunctionSetHandler), (url_pattern(web_app, 'resources'), ResourceHandler), (url_pattern(web_app, 'submit-job'), SubmitJobHandler), (url_pattern(web_app, 'get-env'), EnvironmentHandler)] web_app.add_handlers(host_pattern, handlers) for h in handlers: logger.info('handler => {}'.format(h)) def setup_globals(lab_app): global NOTEBOOK_DIR NOTEBOOK_DIR = lab_app.notebook_dir lab_app.log.info('setup globals') lab_app.log.info('\tNOTEBOOK_DIR: ' + NOTEBOOK_DIR) def apply_api_endpoint_override(logger): global api_endpoint override = os.environ.get(ENV_API_ENDPOINT, None) if not override: logger.info('use api-endpoint: {}'.format(api_endpoint)) logger.info('it could be override from ENV with the key {}'.format(ENV_API_ENDPOINT)) return logger.info('update api-endpoint from ENV: {}'.format(override)) api_endpoint = override
36.064748
167
0.668263
import json from jupyterlab.labapp import LabApp from notebook.base.handlers import APIHandler from notebook.utils import url_path_join import tornado from .api import group_info, submit_job, get_env, check_function_set from .utils import get_group_volume_path import os.path from shutil import copyfile from datetime import datetime import importlib.util import sys ENV_API_ENDPOINT = 'JUPYTERLAB_DEV_API_ENDPOINT' NAMESPACE = "jupyterlab-primehub" api_endpoint = 'http://primehub-graphql/api/graphql' NOTEBOOK_DIR = None class CheckFunctionSetHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) function_set = check_function_set(api_endpoint, api_token) self.log.info(function_set) self.finish(json.dumps(function_set)) class ResourceHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) group_id = os.environ.get('GROUP_ID') self.log.info('group_info with group_id: {}'.format(group_id)) self.finish(json.dumps(group_info(api_endpoint, api_token, group_id))) class SubmitJobHandler(APIHandler): @tornado.web.authenticated def post(self): params = self.get_json_body() api_token = params.get('api_token', None) name = params.get('name', 'notebook_job') group_id = os.environ.get('GROUP_ID') instance_type = params.get('instance_type', None) image = params.get('image', os.environ.get('IMAGE_NAME')) path = params.get('path', None) notebook_parameters = params.get('notebook_parameters', '') self.log.info('group_info with group_id: {}'.format(group_id)) fullpath = os.path.join(NOTEBOOK_DIR, path) self.log.info("relative path: " + path) self.log.info("notebook path: " + fullpath) group_name = params.get('group_name', os.environ.get('GROUP_NAME')) time_string = datetime.now().strftime("%Y%m%d%H%M%S%f") nb_file_name = path.split('/').pop() nb_directory_path = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, '')) hidden_nb_file_name = '.' 
+ nb_file_name.replace('.ipynb', '') + '-' + time_string + '.ipynb' hidden_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name) output_nb_fullpath = os.path.join(NOTEBOOK_DIR, path.replace(nb_file_name, ''), hidden_nb_file_name[1:].replace('.ipynb', '-output.ipynb')) copyfile(fullpath, hidden_nb_fullpath) papermill_parameters = '' try: for parameter in notebook_parameters.replace(' ', '').split(';'): if '=' in parameter: kv = parameter.split('=') papermill_parameters = papermill_parameters + ' -p {} {}'.format(kv[0], kv[1]) except Exception as e: self.finish(json.dumps({ 'status': 'failed', 'error': 'failed to parse notebook parameters', 'message': str(e) })) return command_str = 'cd {} && papermill {} {}{} && rm {}'.format(nb_directory_path, hidden_nb_fullpath, output_nb_fullpath, papermill_parameters, hidden_nb_fullpath) self.finish(json.dumps(submit_job(api_endpoint, api_token, name, group_id, instance_type, image, command_str))) class EnvironmentHandler(APIHandler): @tornado.web.authenticated def post(self): self.finish(json.dumps(get_env())) def url_pattern(web_app, endpoint, *pieces): base_url = web_app.settings["base_url"] return url_path_join(base_url, NAMESPACE, endpoint, *pieces) def setup_handlers(lab_app: LabApp): setup_globals(lab_app) web_app, logger = lab_app.web_app, lab_app.log apply_api_endpoint_override(logger) host_pattern = ".*$" handlers = [(url_pattern(web_app, 'check-function'), CheckFunctionSetHandler), (url_pattern(web_app, 'resources'), ResourceHandler), (url_pattern(web_app, 'submit-job'), SubmitJobHandler), (url_pattern(web_app, 'get-env'), EnvironmentHandler)] web_app.add_handlers(host_pattern, handlers) for h in handlers: logger.info('handler => {}'.format(h)) def setup_globals(lab_app): global NOTEBOOK_DIR NOTEBOOK_DIR = lab_app.notebook_dir lab_app.log.info('setup globals') lab_app.log.info('\tNOTEBOOK_DIR: ' + NOTEBOOK_DIR) def apply_api_endpoint_override(logger): global api_endpoint override = os.environ.get(ENV_API_ENDPOINT, None) if not override: logger.info('use api-endpoint: {}'.format(api_endpoint)) logger.info('it could be override from ENV with the key {}'.format(ENV_API_ENDPOINT)) return logger.info('update api-endpoint from ENV: {}'.format(override)) api_endpoint = override
true
true
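A note on the SubmitJobHandler in the record above: it converts the free-form notebook_parameters string into papermill -p flags before assembling the job command. Below is a minimal, self-contained sketch of just that step; the helper name parse_notebook_parameters and the sample input are illustrative only and are not part of the extension itself.

def parse_notebook_parameters(notebook_parameters: str) -> str:
    # Mirrors the loop in SubmitJobHandler.post(): "k=v;k2=v2" -> " -p k v -p k2 v2"
    papermill_parameters = ''
    for parameter in notebook_parameters.replace(' ', '').split(';'):
        if '=' in parameter:
            kv = parameter.split('=')
            papermill_parameters += ' -p {} {}'.format(kv[0], kv[1])
    return papermill_parameters

if __name__ == '__main__':
    print(parse_notebook_parameters('alpha=0.1; epochs=5'))  # " -p alpha 0.1 -p epochs 5"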
f701253b3bcdb175ffd1c5366a57b88a739349f6
572
py
Python
setup.py
bamps53/runai
0c868160f64e1e063c6eb6f660d42917322d40c5
[ "MIT" ]
null
null
null
setup.py
bamps53/runai
0c868160f64e1e063c6eb6f660d42917322d40c5
[ "MIT" ]
null
null
null
setup.py
bamps53/runai
0c868160f64e1e063c6eb6f660d42917322d40c5
[ "MIT" ]
null
null
null
import setuptools

with open("README.md", "r") as f:
    long_description = f.read()

setuptools.setup(
    name="runai",
    version="0.1.2",
    author="Run:AI",
    author_email="[email protected]",
    description="Run:AI Python library",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/run-ai/runai",
    packages=setuptools.find_packages(),
    classifiers=[
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
    ],
)
26
50
0.645105
import setuptools

with open("README.md", "r") as f:
    long_description = f.read()

setuptools.setup(
    name="runai",
    version="0.1.2",
    author="Run:AI",
    author_email="[email protected]",
    description="Run:AI Python library",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/run-ai/runai",
    packages=setuptools.find_packages(),
    classifiers=[
        "Development Status :: 4 - Beta",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
    ],
)
true
true
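As a quick, hedged usage note for the setup.py record above: once the package has been installed (for example with pip), its metadata can be read back through the standard library. The snippet assumes an installed distribution named runai, which is the name declared in setup(); it is not part of the record itself.

from importlib.metadata import version, metadata  # Python 3.8+

# Only meaningful after the package is installed; values come from setup() above.
print(version("runai"))              # expected "0.1.2"
print(metadata("runai")["Summary"])  # expected "Run:AI Python library"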
f70125880c12638569fbea6a73e05ddc5db89908
167
py
Python
majestic-monolith-django/user/urls.py
kokospapa8/majestic-monolith-django
a0879989a651ecef6761ee7fce619ab17738bb35
[ "Apache-2.0" ]
1
2022-03-12T09:55:36.000Z
2022-03-12T09:55:36.000Z
majestic-monolith-django/user/urls.py
kokospapa8/majestic-monolith-django
a0879989a651ecef6761ee7fce619ab17738bb35
[ "Apache-2.0" ]
6
2022-03-09T10:42:44.000Z
2022-03-31T08:27:25.000Z
majestic-monolith-django/user/urls.py
kokospapa8/majestic-monolith-django
a0879989a651ecef6761ee7fce619ab17738bb35
[ "Apache-2.0" ]
null
null
null
from django.urls import path

from .views import (
    UserSelfView,
)

urlpatterns = [
    path("users/self/profile/", UserSelfView.as_view(), name="user_self"),
]
13.916667
74
0.682635
from django.urls import path

from .views import (
    UserSelfView,
)

urlpatterns = [
    path("users/self/profile/", UserSelfView.as_view(), name="user_self"),
]
true
true
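For the urls.py record above, the named route can be resolved with Django's reverse(). This is only a sketch: it assumes configured Django settings and that the module is included at the project's root URLconf (mounting it under a prefix would change the resulting path).

from django.urls import reverse

# Sketch only: requires a configured Django project that includes this URLconf.
print(reverse("user_self"))  # "/users/self/profile/" when mounted at the root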
f70125e4f534b2c8fcba77529a3348f860a0cdaf
9,160
py
Python
examples/Nolan/AFRL/Carts/cart106.py
Rapid-Design-of-Systems-Laboratory/beluga-legacy
d14713d8211b64293c4427005cf02fbd58630598
[ "MIT" ]
1
2019-03-26T03:00:03.000Z
2019-03-26T03:00:03.000Z
examples/Nolan/AFRL/Carts/cart106.py
Rapid-Design-of-Systems-Laboratory/beluga-legacy
d14713d8211b64293c4427005cf02fbd58630598
[ "MIT" ]
null
null
null
examples/Nolan/AFRL/Carts/cart106.py
Rapid-Design-of-Systems-Laboratory/beluga-legacy
d14713d8211b64293c4427005cf02fbd58630598
[ "MIT" ]
1
2019-07-14T22:53:52.000Z
2019-07-14T22:53:52.000Z
if __name__ == "__main__": import numpy as np import beluga.Beluga as Beluga import beluga.bvpsol as bvpsol import beluga.bvpsol.algorithms as algorithms import beluga.optim.Problem from beluga.optim.problem import * from beluga.continuation import * import logging # Import Libraries for Matrix Calculations from sympy import symbols, Matrix, Transpose, simplify, diff, diag from sympy import sin from sympy import cos, acos from sympy import sqrt from sympy import exp from sympy import atan from numpy import pi writeEqn = True simpList = False if writeEqn: writeList = [] # Constants v, u_max = symbols('v, u_max') xb, yb = symbols('xb, yb') Dt, sigv, sigw, sigr = symbols('Dt, sigv, sigw, sigr') # Primary States x, y, theta = symbols('x, y, theta') # Control w = symbols('w') # Secondary States # Primary State Rates x_dot = v * cos(theta) y_dot = v * sin(theta) theta_dot = u_max * sin(w) writeList = [x_dot, y_dot, theta_dot] # Covariance Calculations p11, p12, p13,\ p22, p23, \ p33 \ = symbols('p11 p12 p13\ p22 p23 \ p33') P = Matrix([[p11, p12, p13], [p12, p22, p23], [p13, p13, p33]]) F = Matrix([[diff(x_dot, x), diff(x_dot, y), diff(x_dot, theta)], [diff(y_dot, x), diff(y_dot, y), diff(y_dot, theta)], [diff(theta_dot, x), diff(theta_dot, y), diff(theta_dot, theta)],]) G = Matrix([[cos(theta), 0], [sin(theta), 0], [0, 1]]) h = sqrt((x - xb)**2 + (y - yb)**2) H = Matrix([[diff(h, x), diff(h, y), diff(h, theta)]]) Q = Dt*diag(sigv**2, sigw**2) R = Dt*diag(sigr**2) P_dot = (F*P + P*F.T - P*H.T*(R**-1)*H*P + G*Q*G.T) Dim = P_dot.shape k = symbols('k') PP = (F*P + P*F.T - k * P*H.T*(R**-1)*H*P + G*Q*G.T) obj = PP[1, 1] for i in range(0, Dim[0]): for j in range(i, Dim[1]): # print(P_dot[i, j]) writeList.append(P_dot[i, j]) # h_new, theta_new, v_new, gam_new = symbols('h_new, theta_new, v_new, gam_new') # h_scale, theta_scale, v_scale, gam_scale = symbols('h_scale, theta_scale, v_scale, gam_scale') states = [x, y, theta, p11, p12, p13, p22, p23, p33] x_s, y_s, theta_s, \ p11_s, p12_s, p13_s, \ p22_s, p23_s, \ p33_s = \ symbols('x_s, y_s, theta_s, \ p11_s, p12_s, p13_s, \ p22_s, p23_s, \ p33_s') scales = [x_s, y_s, theta_s, p11_s, p12_s, p13_s, p22_s, p23_s, p33_s] x_n, y_n, theta_n, \ p11_n, p12_n, p13_n, \ p22_n, p23_n, \ p33_n = \ symbols('x_n, y_n, theta_n, \ p11_n, p12_n, p13_n, \ p22_n, p23_n, \ p33_n') states_new = [x_n, y_n, theta_n, p11_n, p12_n, p13_n, p22_n, p23_n, p33_n] # print(writeList) Z1 = zip(writeList, scales) scaledList = [] for item, Scale in Z1: # print(item) item = item/Scale Z2 = zip(states, states_new, scales) # print(item) # for state, new, scale in Z2: # print(state) # print(new) # print(scale) for state, new, scale in Z2: # print(new) item = item.subs(state, scale*new) # print(item) scaledList.append(item) Z2 = zip(states, states_new, scales) for state, new, scale in Z2: # print(new) obj = obj.subs(state, scale * new) k = 1 with open("eqns.txt", "w") as my_file: for item in scaledList: if simpList: # print('* ' + str(item)) item = simplify(item) # print('# ' + str(item)) my_file.write(str(item) + "\n") # print(" Wrote " + str(k) + "/" + str(len(scaledList))) k += 1 k = 1 with open("eqnsUnscaled.txt", "w") as my_file: for item in writeList: my_file.write(str(item) + "\n") # print(" Wrote " + str(k) + "/" + str(len(writeList))) k += 1 ''' Start Optimal Control Calculations ''' # Read Covariance State Rates from File with open("eqns.txt", "r") as f: eqnsList = list(f) # for item in P_dot_eqns: # print(item) # Rename this and/or move to optim package? 
problem = beluga.optim.Problem('carts0') # Define independent variables problem.independent('t', 's') # Define equations of motion problem\ .state('x_n', eqnsList[0] + '+ ep*u_max*cos(w)', '1') \ .state('y_n', eqnsList[1], '1') \ .state('theta_n', eqnsList[2], '1') \ .state('p11_n', eqnsList[3], '1') \ .state('p12_n', eqnsList[4], '1') \ .state('p13_n', eqnsList[5], '1') \ .state('p22_n', eqnsList[6], '1') \ .state('p23_n', eqnsList[7], '1') \ .state('p33_n', eqnsList[8], '1') \ # Define controls problem.control('w', '1') \ # Define costs # problem.cost['path'] = Expression('p11', 'm^2/s^2') # problem.cost['path'] = Expression('sin(w)**2', 's') # problem.cost['terminal'] = Expression('p22_n', '1') problem.cost['path'] = Expression(str(obj), 's') # Define constraints problem.constraints() \ .initial('x_n-x_n_0', '1') \ .initial('y_n-y_n_0', '1') \ .initial('theta_n-theta_n_0', '1') \ \ .initial('p11_n-p11_n_0', '1') \ .initial('p12_n-p12_n_0', '1') \ .initial('p13_n-p13_n_0', '1') \ .initial('p22_n-p22_n_0', '1') \ .initial('p23_n-p23_n_0', '1') \ .initial('p33_n-p33_n_0', '1') \ \ .terminal('x_n-x_n_f', '1') \ .terminal('y_n-y_n_f', '1') \ \ # Define constants problem.constant('Dt', 0.1, '1') problem.constant('sigv', 0.1, '1') problem.constant('sigw', 0.1, '1') problem.constant('sigr', 0.1, '1') problem.constant('xb', 5, '1') problem.constant('yb', 5, '1') problem.constant('u_max', 0.1, '1') problem.constant('v', 30, '1') problem.constant('x_s', 1, '1') problem.constant('y_s', 1, '1') problem.constant('theta_s', 1, '1') problem.constant('p11_s', 1e-3, '1') problem.constant('p12_s', 1e-3, '1') problem.constant('p13_s', 1e-3, '1') problem.constant('p22_s', 1e-1, '1') problem.constant('p23_s', 1e-2, '1') problem.constant('p33_s', 1e-3, '1') problem.constant('ep', 5, '1') problem.constant('k', 0, '1') problem.bvp_solver = algorithms.MultipleShooting(derivative_method='fd', tolerance=1e-4, max_iterations=1000, verbose=True, cached=False, number_arcs=64) # problem.bvp_solver = algorithms.SingleShooting(derivative_method='fd',tolerance=1e-4, max_iterations=1000, verbose=True, cached=False) problem.scale.unit('m', 1) \ .unit('s', 1) \ .unit('kg', 1) \ .unit('rad', 1) # Define quantity (not implemented at present) # Is this actually an Expression rather than a Value? # problem.quantity = [Value('tanAng','tan(theta)')] problem.guess.setup('auto', start=[0, 0, 0, 0, 0, 0, 0, 0, 0], time_integrate=1, costate_guess=[0, 0, 0.001, -0.0001, 0.0, 0.0, 0.001, 0.0, 0.]) # problem.guess.setup('auto',start=[80000,3.38575809e-21,5000,7.98617365e-02, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],direction='forward',time_integrate=229.865209,costate_guess =[-1.37514494e+01,3.80852584e+06,-3.26290152e+03,-2.31984720e-14,0.00,0.01,0.01,0.01,0.01,0.01,0.01,0.01,0.01,0.01]) # Figure out nicer way of representing this. Done? problem.steps.add_step().num_cases(5) \ .terminal('x_n', 20) \ .terminal('y_n', 0) \ problem.steps.add_step().num_cases(10) \ .const('xb', 200) \ .const('yb', 600) \ problem.steps.add_step().num_cases(80) \ .terminal('x_n', 4000) \ problem.steps.add_step().num_cases(20) \ .const('k', 1) \ # problem.steps.add_step().num_cases(10) \ # .const('xb', 7) \ # .const('yb', 7) \ # \ # \ # problem.steps.add_step().num_cases(20) \ # .terminal('x_n', 150) \ # .terminal('y_n', 0) \ # problem.steps.add_step().num_cases(15) \ # .terminal('theta', 5) # problem.steps.add_step().num_cases(21) \ # .terminal('theta', 10*pi/180) Beluga.run(problem, display_level=logging.DEBUG)
30.032787
288
0.504476
if __name__ == "__main__": import numpy as np import beluga.Beluga as Beluga import beluga.bvpsol as bvpsol import beluga.bvpsol.algorithms as algorithms import beluga.optim.Problem from beluga.optim.problem import * from beluga.continuation import * import logging from sympy import symbols, Matrix, Transpose, simplify, diff, diag from sympy import sin from sympy import cos, acos from sympy import sqrt from sympy import exp from sympy import atan from numpy import pi writeEqn = True simpList = False if writeEqn: writeList = [] v, u_max = symbols('v, u_max') xb, yb = symbols('xb, yb') Dt, sigv, sigw, sigr = symbols('Dt, sigv, sigw, sigr') x, y, theta = symbols('x, y, theta') w = symbols('w') x_dot = v * cos(theta) y_dot = v * sin(theta) theta_dot = u_max * sin(w) writeList = [x_dot, y_dot, theta_dot] p11, p12, p13,\ p22, p23, \ p33 \ = symbols('p11 p12 p13\ p22 p23 \ p33') P = Matrix([[p11, p12, p13], [p12, p22, p23], [p13, p13, p33]]) F = Matrix([[diff(x_dot, x), diff(x_dot, y), diff(x_dot, theta)], [diff(y_dot, x), diff(y_dot, y), diff(y_dot, theta)], [diff(theta_dot, x), diff(theta_dot, y), diff(theta_dot, theta)],]) G = Matrix([[cos(theta), 0], [sin(theta), 0], [0, 1]]) h = sqrt((x - xb)**2 + (y - yb)**2) H = Matrix([[diff(h, x), diff(h, y), diff(h, theta)]]) Q = Dt*diag(sigv**2, sigw**2) R = Dt*diag(sigr**2) P_dot = (F*P + P*F.T - P*H.T*(R**-1)*H*P + G*Q*G.T) Dim = P_dot.shape k = symbols('k') PP = (F*P + P*F.T - k * P*H.T*(R**-1)*H*P + G*Q*G.T) obj = PP[1, 1] for i in range(0, Dim[0]): for j in range(i, Dim[1]): writeList.append(P_dot[i, j]) states = [x, y, theta, p11, p12, p13, p22, p23, p33] x_s, y_s, theta_s, \ p11_s, p12_s, p13_s, \ p22_s, p23_s, \ p33_s = \ symbols('x_s, y_s, theta_s, \ p11_s, p12_s, p13_s, \ p22_s, p23_s, \ p33_s') scales = [x_s, y_s, theta_s, p11_s, p12_s, p13_s, p22_s, p23_s, p33_s] x_n, y_n, theta_n, \ p11_n, p12_n, p13_n, \ p22_n, p23_n, \ p33_n = \ symbols('x_n, y_n, theta_n, \ p11_n, p12_n, p13_n, \ p22_n, p23_n, \ p33_n') states_new = [x_n, y_n, theta_n, p11_n, p12_n, p13_n, p22_n, p23_n, p33_n] Z1 = zip(writeList, scales) scaledList = [] for item, Scale in Z1: item = item/Scale Z2 = zip(states, states_new, scales) for state, new, scale in Z2: item = item.subs(state, scale*new) scaledList.append(item) Z2 = zip(states, states_new, scales) for state, new, scale in Z2: obj = obj.subs(state, scale * new) k = 1 with open("eqns.txt", "w") as my_file: for item in scaledList: if simpList: item = simplify(item) my_file.write(str(item) + "\n") k += 1 k = 1 with open("eqnsUnscaled.txt", "w") as my_file: for item in writeList: my_file.write(str(item) + "\n") k += 1 with open("eqns.txt", "r") as f: eqnsList = list(f) problem = beluga.optim.Problem('carts0') problem.independent('t', 's') problem\ .state('x_n', eqnsList[0] + '+ ep*u_max*cos(w)', '1') \ .state('y_n', eqnsList[1], '1') \ .state('theta_n', eqnsList[2], '1') \ .state('p11_n', eqnsList[3], '1') \ .state('p12_n', eqnsList[4], '1') \ .state('p13_n', eqnsList[5], '1') \ .state('p22_n', eqnsList[6], '1') \ .state('p23_n', eqnsList[7], '1') \ .state('p33_n', eqnsList[8], '1') \ problem.control('w', '1') \ problem.cost['path'] = Expression(str(obj), 's') problem.constraints() \ .initial('x_n-x_n_0', '1') \ .initial('y_n-y_n_0', '1') \ .initial('theta_n-theta_n_0', '1') \ \ .initial('p11_n-p11_n_0', '1') \ .initial('p12_n-p12_n_0', '1') \ .initial('p13_n-p13_n_0', '1') \ .initial('p22_n-p22_n_0', '1') \ .initial('p23_n-p23_n_0', '1') \ .initial('p33_n-p33_n_0', '1') \ \ .terminal('x_n-x_n_f', '1') \ 
.terminal('y_n-y_n_f', '1') \ \ problem.constant('Dt', 0.1, '1') problem.constant('sigv', 0.1, '1') problem.constant('sigw', 0.1, '1') problem.constant('sigr', 0.1, '1') problem.constant('xb', 5, '1') problem.constant('yb', 5, '1') problem.constant('u_max', 0.1, '1') problem.constant('v', 30, '1') problem.constant('x_s', 1, '1') problem.constant('y_s', 1, '1') problem.constant('theta_s', 1, '1') problem.constant('p11_s', 1e-3, '1') problem.constant('p12_s', 1e-3, '1') problem.constant('p13_s', 1e-3, '1') problem.constant('p22_s', 1e-1, '1') problem.constant('p23_s', 1e-2, '1') problem.constant('p33_s', 1e-3, '1') problem.constant('ep', 5, '1') problem.constant('k', 0, '1') problem.bvp_solver = algorithms.MultipleShooting(derivative_method='fd', tolerance=1e-4, max_iterations=1000, verbose=True, cached=False, number_arcs=64) problem.scale.unit('m', 1) \ .unit('s', 1) \ .unit('kg', 1) \ .unit('rad', 1) problem.guess.setup('auto', start=[0, 0, 0, 0, 0, 0, 0, 0, 0], time_integrate=1, costate_guess=[0, 0, 0.001, -0.0001, 0.0, 0.0, 0.001, 0.0, 0.]) problem.steps.add_step().num_cases(5) \ .terminal('x_n', 20) \ .terminal('y_n', 0) \ problem.steps.add_step().num_cases(10) \ .const('xb', 200) \ .const('yb', 600) \ problem.steps.add_step().num_cases(80) \ .terminal('x_n', 4000) \ problem.steps.add_step().num_cases(20) \ .const('k', 1) \ Beluga.run(problem, display_level=logging.DEBUG)
true
true
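The cart106 record above propagates an EKF covariance with a Riccati-type rate, P_dot = F*P + P*F.T - P*H.T*(R**-1)*H*P + G*Q*G.T. The following standalone SymPy sketch reproduces that expression with a fully symmetric P (the record's own third row of P repeats p13 where p23 would normally sit) and checks that the resulting rate stays symmetric; it is an illustration of the update, not the script itself.

from sympy import symbols, Matrix, diag, sin, cos, sqrt, simplify

x, y, theta, v, xb, yb, Dt, sigv, sigw, sigr = symbols('x y theta v xb yb Dt sigv sigw sigr')
p11, p12, p13, p22, p23, p33 = symbols('p11 p12 p13 p22 p23 p33')

P = Matrix([[p11, p12, p13], [p12, p22, p23], [p13, p23, p33]])       # symmetric covariance
F = Matrix([[0, 0, -v*sin(theta)], [0, 0, v*cos(theta)], [0, 0, 0]])  # Jacobian of the unicycle dynamics
G = Matrix([[cos(theta), 0], [sin(theta), 0], [0, 1]])
h = sqrt((x - xb)**2 + (y - yb)**2)                                   # range measurement to the beacon (xb, yb)
H = Matrix([[h.diff(x), h.diff(y), h.diff(theta)]])
Q = Dt*diag(sigv**2, sigw**2)
R = Dt*diag(sigr**2)

P_dot = F*P + P*F.T - P*H.T*(R**-1)*H*P + G*Q*G.T
print(simplify(P_dot - P_dot.T))  # zero matrix: the covariance rate remains symmetric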
f701263075037165544928938c50df33a6f178da
881
py
Python
src/airbnb_priceforecaster/manage.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
src/airbnb_priceforecaster/manage.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
src/airbnb_priceforecaster/manage.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
from airbnb_priceforecaster.models import train_model
from airbnb_priceforecaster.models import build_model
from airbnb_priceforecaster.data import AirBnBDataset

import click


@click.group()
def cli():
    pass


@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def train(year, month, day):
    result = train_model(year, month, day)
    click.echo(result)


@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def prod(year, month, day):
    dataset = AirBnBDataset(year=year, month=month, day=day)
    model = build_model()
    model.train_estimator(dataset)
    model.save_estimator(prod=True)


if __name__ == '__main__':
    cli()
25.171429
60
0.692395
from airbnb_priceforecaster.models import train_model
from airbnb_priceforecaster.models import build_model
from airbnb_priceforecaster.data import AirBnBDataset

import click


@click.group()
def cli():
    pass


@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def train(year, month, day):
    result = train_model(year, month, day)
    click.echo(result)


@cli.command()
@click.option("-y", "--year", default=2020, type=int)
@click.option("-m", "--month", default=5, type=int)
@click.option("-d", "--day", default=30, type=int)
def prod(year, month, day):
    dataset = AirBnBDataset(year=year, month=month, day=day)
    model = build_model()
    model.train_estimator(dataset)
    model.save_estimator(prod=True)


if __name__ == '__main__':
    cli()
true
true
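A small, hedged smoke test for the click CLI in the manage.py record above, invoking it in-process via click's test runner. It assumes the src layout makes the module importable as airbnb_priceforecaster.manage and that the model and data dependencies resolve; both are assumptions, not facts stated in the record.

from click.testing import CliRunner

from airbnb_priceforecaster.manage import cli  # assumed import path (src layout)

runner = CliRunner()
result = runner.invoke(cli, ["train", "-y", "2020", "-m", "5", "-d", "30"])
print(result.exit_code)
print(result.output)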
f7012653b3bd41290d7516ca434157dbd84824a7
69,373
py
Python
sympy/functions/elementary/tests/test_trigonometric.py
MarcPartensky/sympy
ca53aee92788e81958fae1ce53a9d9e58bbdadaa
[ "BSD-3-Clause" ]
null
null
null
sympy/functions/elementary/tests/test_trigonometric.py
MarcPartensky/sympy
ca53aee92788e81958fae1ce53a9d9e58bbdadaa
[ "BSD-3-Clause" ]
null
null
null
sympy/functions/elementary/tests/test_trigonometric.py
MarcPartensky/sympy
ca53aee92788e81958fae1ce53a9d9e58bbdadaa
[ "BSD-3-Clause" ]
null
null
null
from sympy import (symbols, Symbol, nan, oo, zoo, I, sinh, sin, pi, atan, acos, Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos, cosh, atan2, exp, log, asinh, acoth, atanh, O, cancel, Matrix, re, im, Float, Pow, gcd, sec, csc, cot, diff, simplify, Heaviside, arg, conjugate, series, FiniteSet, asec, acsc, Mul, sinc, jn, AccumBounds, Interval, ImageSet, Lambda, besselj) from sympy.core.compatibility import range from sympy.core.expr import unchanged from sympy.core.function import ArgumentIndexError from sympy.core.relational import Ne, Eq from sympy.functions.elementary.piecewise import Piecewise from sympy.sets.setexpr import SetExpr from sympy.utilities.pytest import XFAIL, slow, raises x, y, z = symbols('x y z') r = Symbol('r', real=True) k = Symbol('k', integer=True) p = Symbol('p', positive=True) n = Symbol('n', negative=True) np = Symbol('p', nonpositive=True) nn = Symbol('n', nonnegative=True) nz = Symbol('nz', nonzero=True) ep = Symbol('ep', extended_positive=True) en = Symbol('en', extended_negative=True) enp = Symbol('ep', extended_nonpositive=True) enn = Symbol('en', extended_nonnegative=True) enz = Symbol('enz', extended_nonzero=True) a = Symbol('a', algebraic=True) na = Symbol('na', nonzero=True, algebraic=True) def test_sin(): x, y = symbols('x y') assert sin.nargs == FiniteSet(1) assert sin(nan) is nan assert sin(zoo) is nan assert sin(oo) == AccumBounds(-1, 1) assert sin(oo) - sin(oo) == AccumBounds(-2, 2) assert sin(oo*I) == oo*I assert sin(-oo*I) == -oo*I assert 0*sin(oo) is S.Zero assert 0/sin(oo) is S.Zero assert 0 + sin(oo) == AccumBounds(-1, 1) assert 5 + sin(oo) == AccumBounds(4, 6) assert sin(0) == 0 assert sin(asin(x)) == x assert sin(atan(x)) == x / sqrt(1 + x**2) assert sin(acos(x)) == sqrt(1 - x**2) assert sin(acot(x)) == 1 / (sqrt(1 + 1 / x**2) * x) assert sin(acsc(x)) == 1 / x assert sin(asec(x)) == sqrt(1 - 1 / x**2) assert sin(atan2(y, x)) == y / sqrt(x**2 + y**2) assert sin(pi*I) == sinh(pi)*I assert sin(-pi*I) == -sinh(pi)*I assert sin(-2*I) == -sinh(2)*I assert sin(pi) == 0 assert sin(-pi) == 0 assert sin(2*pi) == 0 assert sin(-2*pi) == 0 assert sin(-3*10**73*pi) == 0 assert sin(7*10**103*pi) == 0 assert sin(pi/2) == 1 assert sin(-pi/2) == -1 assert sin(pi*Rational(5, 2)) == 1 assert sin(pi*Rational(7, 2)) == -1 ne = symbols('ne', integer=True, even=False) e = symbols('e', even=True) assert sin(pi*ne/2) == (-1)**(ne/2 - S.Half) assert sin(pi*k/2).func == sin assert sin(pi*e/2) == 0 assert sin(pi*k) == 0 assert sin(pi*k).subs(k, 3) == sin(pi*k/2).subs(k, 6) # issue 8298 assert sin(pi/3) == S.Half*sqrt(3) assert sin(pi*Rational(-2, 3)) == Rational(-1, 2)*sqrt(3) assert sin(pi/4) == S.Half*sqrt(2) assert sin(-pi/4) == Rational(-1, 2)*sqrt(2) assert sin(pi*Rational(17, 4)) == S.Half*sqrt(2) assert sin(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2) assert sin(pi/6) == S.Half assert sin(-pi/6) == Rational(-1, 2) assert sin(pi*Rational(7, 6)) == Rational(-1, 2) assert sin(pi*Rational(-5, 6)) == Rational(-1, 2) assert sin(pi*Rational(1, 5)) == sqrt((5 - sqrt(5)) / 8) assert sin(pi*Rational(2, 5)) == sqrt((5 + sqrt(5)) / 8) assert sin(pi*Rational(3, 5)) == sin(pi*Rational(2, 5)) assert sin(pi*Rational(4, 5)) == sin(pi*Rational(1, 5)) assert sin(pi*Rational(6, 5)) == -sin(pi*Rational(1, 5)) assert sin(pi*Rational(8, 5)) == -sin(pi*Rational(2, 5)) assert sin(pi*Rational(-1273, 5)) == -sin(pi*Rational(2, 5)) assert sin(pi/8) == sqrt((2 - sqrt(2))/4) assert sin(pi/10) == Rational(-1, 4) + sqrt(5)/4 assert sin(pi/12) == -sqrt(2)/4 + sqrt(6)/4 assert 
sin(pi*Rational(5, 12)) == sqrt(2)/4 + sqrt(6)/4 assert sin(pi*Rational(-7, 12)) == -sqrt(2)/4 - sqrt(6)/4 assert sin(pi*Rational(-11, 12)) == sqrt(2)/4 - sqrt(6)/4 assert sin(pi*Rational(104, 105)) == sin(pi/105) assert sin(pi*Rational(106, 105)) == -sin(pi/105) assert sin(pi*Rational(-104, 105)) == -sin(pi/105) assert sin(pi*Rational(-106, 105)) == sin(pi/105) assert sin(x*I) == sinh(x)*I assert sin(k*pi) == 0 assert sin(17*k*pi) == 0 assert sin(k*pi*I) == sinh(k*pi)*I assert sin(r).is_real is True assert sin(0, evaluate=False).is_algebraic assert sin(a).is_algebraic is None assert sin(na).is_algebraic is False q = Symbol('q', rational=True) assert sin(pi*q).is_algebraic qn = Symbol('qn', rational=True, nonzero=True) assert sin(qn).is_rational is False assert sin(q).is_rational is None # issue 8653 assert isinstance(sin( re(x) - im(y)), sin) is True assert isinstance(sin(-re(x) + im(y)), sin) is False assert sin(SetExpr(Interval(0, 1))) == SetExpr(ImageSet(Lambda(x, sin(x)), Interval(0, 1))) for d in list(range(1, 22)) + [60, 85]: for n in range(0, d*2 + 1): x = n*pi/d e = abs( float(sin(x)) - sin(float(x)) ) assert e < 1e-12 def test_sin_cos(): for d in [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 24, 30, 40, 60, 120]: # list is not exhaustive... for n in range(-2*d, d*2): x = n*pi/d assert sin(x + pi/2) == cos(x), "fails for %d*pi/%d" % (n, d) assert sin(x - pi/2) == -cos(x), "fails for %d*pi/%d" % (n, d) assert sin(x) == cos(x - pi/2), "fails for %d*pi/%d" % (n, d) assert -sin(x) == cos(x + pi/2), "fails for %d*pi/%d" % (n, d) def test_sin_series(): assert sin(x).series(x, 0, 9) == \ x - x**3/6 + x**5/120 - x**7/5040 + O(x**9) def test_sin_rewrite(): assert sin(x).rewrite(exp) == -I*(exp(I*x) - exp(-I*x))/2 assert sin(x).rewrite(tan) == 2*tan(x/2)/(1 + tan(x/2)**2) assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2) assert sin(sinh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sinh(3)).n() assert sin(cosh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cosh(3)).n() assert sin(tanh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tanh(3)).n() assert sin(coth(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, coth(3)).n() assert sin(sin(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sin(3)).n() assert sin(cos(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cos(3)).n() assert sin(tan(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tan(3)).n() assert sin(cot(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cot(3)).n() assert sin(log(x)).rewrite(Pow) == I*x**-I / 2 - I*x**I /2 assert sin(x).rewrite(csc) == 1/csc(x) assert sin(x).rewrite(cos) == cos(x - pi / 2, evaluate=False) assert sin(x).rewrite(sec) == 1 / sec(x - pi / 2, evaluate=False) assert sin(cos(x)).rewrite(Pow) == sin(cos(x)) def test_sin_expansion(): # Note: these formulas are not unique. The ones here come from the # Chebyshev formulas. 
assert sin(x + y).expand(trig=True) == sin(x)*cos(y) + cos(x)*sin(y) assert sin(x - y).expand(trig=True) == sin(x)*cos(y) - cos(x)*sin(y) assert sin(y - x).expand(trig=True) == cos(x)*sin(y) - sin(x)*cos(y) assert sin(2*x).expand(trig=True) == 2*sin(x)*cos(x) assert sin(3*x).expand(trig=True) == -4*sin(x)**3 + 3*sin(x) assert sin(4*x).expand(trig=True) == -8*sin(x)**3*cos(x) + 4*sin(x)*cos(x) assert sin(2).expand(trig=True) == 2*sin(1)*cos(1) assert sin(3).expand(trig=True) == -4*sin(1)**3 + 3*sin(1) def test_sin_AccumBounds(): assert sin(AccumBounds(-oo, oo)) == AccumBounds(-1, 1) assert sin(AccumBounds(0, oo)) == AccumBounds(-1, 1) assert sin(AccumBounds(-oo, 0)) == AccumBounds(-1, 1) assert sin(AccumBounds(0, 2*S.Pi)) == AccumBounds(-1, 1) assert sin(AccumBounds(0, S.Pi*Rational(3, 4))) == AccumBounds(0, 1) assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(7, 4))) == AccumBounds(-1, sin(S.Pi*Rational(3, 4))) assert sin(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(sin(S.Pi/4), sin(S.Pi/3)) assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 6))) == AccumBounds(sin(S.Pi*Rational(5, 6)), sin(S.Pi*Rational(3, 4))) def test_sin_fdiff(): assert sin(x).fdiff() == cos(x) raises(ArgumentIndexError, lambda: sin(x).fdiff(2)) def test_trig_symmetry(): assert sin(-x) == -sin(x) assert cos(-x) == cos(x) assert tan(-x) == -tan(x) assert cot(-x) == -cot(x) assert sin(x + pi) == -sin(x) assert sin(x + 2*pi) == sin(x) assert sin(x + 3*pi) == -sin(x) assert sin(x + 4*pi) == sin(x) assert sin(x - 5*pi) == -sin(x) assert cos(x + pi) == -cos(x) assert cos(x + 2*pi) == cos(x) assert cos(x + 3*pi) == -cos(x) assert cos(x + 4*pi) == cos(x) assert cos(x - 5*pi) == -cos(x) assert tan(x + pi) == tan(x) assert tan(x - 3*pi) == tan(x) assert cot(x + pi) == cot(x) assert cot(x - 3*pi) == cot(x) assert sin(pi/2 - x) == cos(x) assert sin(pi*Rational(3, 2) - x) == -cos(x) assert sin(pi*Rational(5, 2) - x) == cos(x) assert cos(pi/2 - x) == sin(x) assert cos(pi*Rational(3, 2) - x) == -sin(x) assert cos(pi*Rational(5, 2) - x) == sin(x) assert tan(pi/2 - x) == cot(x) assert tan(pi*Rational(3, 2) - x) == cot(x) assert tan(pi*Rational(5, 2) - x) == cot(x) assert cot(pi/2 - x) == tan(x) assert cot(pi*Rational(3, 2) - x) == tan(x) assert cot(pi*Rational(5, 2) - x) == tan(x) assert sin(pi/2 + x) == cos(x) assert cos(pi/2 + x) == -sin(x) assert tan(pi/2 + x) == -cot(x) assert cot(pi/2 + x) == -tan(x) def test_cos(): x, y = symbols('x y') assert cos.nargs == FiniteSet(1) assert cos(nan) is nan assert cos(oo) == AccumBounds(-1, 1) assert cos(oo) - cos(oo) == AccumBounds(-2, 2) assert cos(oo*I) is oo assert cos(-oo*I) is oo assert cos(zoo) is nan assert cos(0) == 1 assert cos(acos(x)) == x assert cos(atan(x)) == 1 / sqrt(1 + x**2) assert cos(asin(x)) == sqrt(1 - x**2) assert cos(acot(x)) == 1 / sqrt(1 + 1 / x**2) assert cos(acsc(x)) == sqrt(1 - 1 / x**2) assert cos(asec(x)) == 1 / x assert cos(atan2(y, x)) == x / sqrt(x**2 + y**2) assert cos(pi*I) == cosh(pi) assert cos(-pi*I) == cosh(pi) assert cos(-2*I) == cosh(2) assert cos(pi/2) == 0 assert cos(-pi/2) == 0 assert cos(pi/2) == 0 assert cos(-pi/2) == 0 assert cos((-3*10**73 + 1)*pi/2) == 0 assert cos((7*10**103 + 1)*pi/2) == 0 n = symbols('n', integer=True, even=False) e = symbols('e', even=True) assert cos(pi*n/2) == 0 assert cos(pi*e/2) == (-1)**(e/2) assert cos(pi) == -1 assert cos(-pi) == -1 assert cos(2*pi) == 1 assert cos(5*pi) == -1 assert cos(8*pi) == 1 assert cos(pi/3) == S.Half assert cos(pi*Rational(-2, 3)) == Rational(-1, 2) assert cos(pi/4) == 
S.Half*sqrt(2) assert cos(-pi/4) == S.Half*sqrt(2) assert cos(pi*Rational(11, 4)) == Rational(-1, 2)*sqrt(2) assert cos(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2) assert cos(pi/6) == S.Half*sqrt(3) assert cos(-pi/6) == S.Half*sqrt(3) assert cos(pi*Rational(7, 6)) == Rational(-1, 2)*sqrt(3) assert cos(pi*Rational(-5, 6)) == Rational(-1, 2)*sqrt(3) assert cos(pi*Rational(1, 5)) == (sqrt(5) + 1)/4 assert cos(pi*Rational(2, 5)) == (sqrt(5) - 1)/4 assert cos(pi*Rational(3, 5)) == -cos(pi*Rational(2, 5)) assert cos(pi*Rational(4, 5)) == -cos(pi*Rational(1, 5)) assert cos(pi*Rational(6, 5)) == -cos(pi*Rational(1, 5)) assert cos(pi*Rational(8, 5)) == cos(pi*Rational(2, 5)) assert cos(pi*Rational(-1273, 5)) == -cos(pi*Rational(2, 5)) assert cos(pi/8) == sqrt((2 + sqrt(2))/4) assert cos(pi/12) == sqrt(2)/4 + sqrt(6)/4 assert cos(pi*Rational(5, 12)) == -sqrt(2)/4 + sqrt(6)/4 assert cos(pi*Rational(7, 12)) == sqrt(2)/4 - sqrt(6)/4 assert cos(pi*Rational(11, 12)) == -sqrt(2)/4 - sqrt(6)/4 assert cos(pi*Rational(104, 105)) == -cos(pi/105) assert cos(pi*Rational(106, 105)) == -cos(pi/105) assert cos(pi*Rational(-104, 105)) == -cos(pi/105) assert cos(pi*Rational(-106, 105)) == -cos(pi/105) assert cos(x*I) == cosh(x) assert cos(k*pi*I) == cosh(k*pi) assert cos(r).is_real is True assert cos(0, evaluate=False).is_algebraic assert cos(a).is_algebraic is None assert cos(na).is_algebraic is False q = Symbol('q', rational=True) assert cos(pi*q).is_algebraic assert cos(pi*Rational(2, 7)).is_algebraic assert cos(k*pi) == (-1)**k assert cos(2*k*pi) == 1 for d in list(range(1, 22)) + [60, 85]: for n in range(0, 2*d + 1): x = n*pi/d e = abs( float(cos(x)) - cos(float(x)) ) assert e < 1e-12 def test_issue_6190(): c = Float('123456789012345678901234567890.25', '') for cls in [sin, cos, tan, cot]: assert cls(c*pi) == cls(pi/4) assert cls(4.125*pi) == cls(pi/8) assert cls(4.7*pi) == cls((4.7 % 2)*pi) def test_cos_series(): assert cos(x).series(x, 0, 9) == \ 1 - x**2/2 + x**4/24 - x**6/720 + x**8/40320 + O(x**9) def test_cos_rewrite(): assert cos(x).rewrite(exp) == exp(I*x)/2 + exp(-I*x)/2 assert cos(x).rewrite(tan) == (1 - tan(x/2)**2)/(1 + tan(x/2)**2) assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2) assert cos(sinh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sinh(3)).n() assert cos(cosh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cosh(3)).n() assert cos(tanh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tanh(3)).n() assert cos(coth(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, coth(3)).n() assert cos(sin(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sin(3)).n() assert cos(cos(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cos(3)).n() assert cos(tan(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tan(3)).n() assert cos(cot(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cot(3)).n() assert cos(log(x)).rewrite(Pow) == x**I/2 + x**-I/2 assert cos(x).rewrite(sec) == 1/sec(x) assert cos(x).rewrite(sin) == sin(x + pi/2, evaluate=False) assert cos(x).rewrite(csc) == 1/csc(-x + pi/2, evaluate=False) assert cos(sin(x)).rewrite(Pow) == cos(sin(x)) def test_cos_expansion(): assert cos(x + y).expand(trig=True) == cos(x)*cos(y) - sin(x)*sin(y) assert cos(x - y).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y) assert cos(y - x).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y) assert cos(2*x).expand(trig=True) == 2*cos(x)**2 - 1 assert cos(3*x).expand(trig=True) == 
4*cos(x)**3 - 3*cos(x) assert cos(4*x).expand(trig=True) == 8*cos(x)**4 - 8*cos(x)**2 + 1 assert cos(2).expand(trig=True) == 2*cos(1)**2 - 1 assert cos(3).expand(trig=True) == 4*cos(1)**3 - 3*cos(1) def test_cos_AccumBounds(): assert cos(AccumBounds(-oo, oo)) == AccumBounds(-1, 1) assert cos(AccumBounds(0, oo)) == AccumBounds(-1, 1) assert cos(AccumBounds(-oo, 0)) == AccumBounds(-1, 1) assert cos(AccumBounds(0, 2*S.Pi)) == AccumBounds(-1, 1) assert cos(AccumBounds(-S.Pi/3, S.Pi/4)) == AccumBounds(cos(-S.Pi/3), 1) assert cos(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 4))) == AccumBounds(-1, cos(S.Pi*Rational(3, 4))) assert cos(AccumBounds(S.Pi*Rational(5, 4), S.Pi*Rational(4, 3))) == AccumBounds(cos(S.Pi*Rational(5, 4)), cos(S.Pi*Rational(4, 3))) assert cos(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(cos(S.Pi/3), cos(S.Pi/4)) def test_cos_fdiff(): assert cos(x).fdiff() == -sin(x) raises(ArgumentIndexError, lambda: cos(x).fdiff(2)) def test_tan(): assert tan(nan) is nan assert tan(zoo) is nan assert tan(oo) == AccumBounds(-oo, oo) assert tan(oo) - tan(oo) == AccumBounds(-oo, oo) assert tan.nargs == FiniteSet(1) assert tan(oo*I) == I assert tan(-oo*I) == -I assert tan(0) == 0 assert tan(atan(x)) == x assert tan(asin(x)) == x / sqrt(1 - x**2) assert tan(acos(x)) == sqrt(1 - x**2) / x assert tan(acot(x)) == 1 / x assert tan(acsc(x)) == 1 / (sqrt(1 - 1 / x**2) * x) assert tan(asec(x)) == sqrt(1 - 1 / x**2) * x assert tan(atan2(y, x)) == y/x assert tan(pi*I) == tanh(pi)*I assert tan(-pi*I) == -tanh(pi)*I assert tan(-2*I) == -tanh(2)*I assert tan(pi) == 0 assert tan(-pi) == 0 assert tan(2*pi) == 0 assert tan(-2*pi) == 0 assert tan(-3*10**73*pi) == 0 assert tan(pi/2) is zoo assert tan(pi*Rational(3, 2)) is zoo assert tan(pi/3) == sqrt(3) assert tan(pi*Rational(-2, 3)) == sqrt(3) assert tan(pi/4) is S.One assert tan(-pi/4) is S.NegativeOne assert tan(pi*Rational(17, 4)) is S.One assert tan(pi*Rational(-3, 4)) is S.One assert tan(pi/5) == sqrt(5 - 2*sqrt(5)) assert tan(pi*Rational(2, 5)) == sqrt(5 + 2*sqrt(5)) assert tan(pi*Rational(18, 5)) == -sqrt(5 + 2*sqrt(5)) assert tan(pi*Rational(-16, 5)) == -sqrt(5 - 2*sqrt(5)) assert tan(pi/6) == 1/sqrt(3) assert tan(-pi/6) == -1/sqrt(3) assert tan(pi*Rational(7, 6)) == 1/sqrt(3) assert tan(pi*Rational(-5, 6)) == 1/sqrt(3) assert tan(pi/8) == -1 + sqrt(2) assert tan(pi*Rational(3, 8)) == 1 + sqrt(2) # issue 15959 assert tan(pi*Rational(5, 8)) == -1 - sqrt(2) assert tan(pi*Rational(7, 8)) == 1 - sqrt(2) assert tan(pi/10) == sqrt(1 - 2*sqrt(5)/5) assert tan(pi*Rational(3, 10)) == sqrt(1 + 2*sqrt(5)/5) assert tan(pi*Rational(17, 10)) == -sqrt(1 + 2*sqrt(5)/5) assert tan(pi*Rational(-31, 10)) == -sqrt(1 - 2*sqrt(5)/5) assert tan(pi/12) == -sqrt(3) + 2 assert tan(pi*Rational(5, 12)) == sqrt(3) + 2 assert tan(pi*Rational(7, 12)) == -sqrt(3) - 2 assert tan(pi*Rational(11, 12)) == sqrt(3) - 2 assert tan(pi/24).radsimp() == -2 - sqrt(3) + sqrt(2) + sqrt(6) assert tan(pi*Rational(5, 24)).radsimp() == -2 + sqrt(3) - sqrt(2) + sqrt(6) assert tan(pi*Rational(7, 24)).radsimp() == 2 - sqrt(3) - sqrt(2) + sqrt(6) assert tan(pi*Rational(11, 24)).radsimp() == 2 + sqrt(3) + sqrt(2) + sqrt(6) assert tan(pi*Rational(13, 24)).radsimp() == -2 - sqrt(3) - sqrt(2) - sqrt(6) assert tan(pi*Rational(17, 24)).radsimp() == -2 + sqrt(3) + sqrt(2) - sqrt(6) assert tan(pi*Rational(19, 24)).radsimp() == 2 - sqrt(3) + sqrt(2) - sqrt(6) assert tan(pi*Rational(23, 24)).radsimp() == 2 + sqrt(3) - sqrt(2) - sqrt(6) assert tan(x*I) == tanh(x)*I assert tan(k*pi) == 0 assert tan(17*k*pi) == 0 
assert tan(k*pi*I) == tanh(k*pi)*I assert tan(r).is_real is None assert tan(r).is_extended_real is True assert tan(0, evaluate=False).is_algebraic assert tan(a).is_algebraic is None assert tan(na).is_algebraic is False assert tan(pi*Rational(10, 7)) == tan(pi*Rational(3, 7)) assert tan(pi*Rational(11, 7)) == -tan(pi*Rational(3, 7)) assert tan(pi*Rational(-11, 7)) == tan(pi*Rational(3, 7)) assert tan(pi*Rational(15, 14)) == tan(pi/14) assert tan(pi*Rational(-15, 14)) == -tan(pi/14) assert tan(r).is_finite is None assert tan(I*r).is_finite is True def test_tan_series(): assert tan(x).series(x, 0, 9) == \ x + x**3/3 + 2*x**5/15 + 17*x**7/315 + O(x**9) def test_tan_rewrite(): neg_exp, pos_exp = exp(-x*I), exp(x*I) assert tan(x).rewrite(exp) == I*(neg_exp - pos_exp)/(neg_exp + pos_exp) assert tan(x).rewrite(sin) == 2*sin(x)**2/sin(2*x) assert tan(x).rewrite(cos) == cos(x - S.Pi/2, evaluate=False)/cos(x) assert tan(x).rewrite(cot) == 1/cot(x) assert tan(sinh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sinh(3)).n() assert tan(cosh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cosh(3)).n() assert tan(tanh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tanh(3)).n() assert tan(coth(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, coth(3)).n() assert tan(sin(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sin(3)).n() assert tan(cos(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cos(3)).n() assert tan(tan(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tan(3)).n() assert tan(cot(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cot(3)).n() assert tan(log(x)).rewrite(Pow) == I*(x**-I - x**I)/(x**-I + x**I) assert 0 == (cos(pi/34)*tan(pi/34) - sin(pi/34)).rewrite(pow) assert 0 == (cos(pi/17)*tan(pi/17) - sin(pi/17)).rewrite(pow) assert tan(pi/19).rewrite(pow) == tan(pi/19) assert tan(pi*Rational(8, 19)).rewrite(sqrt) == tan(pi*Rational(8, 19)) assert tan(x).rewrite(sec) == sec(x)/sec(x - pi/2, evaluate=False) assert tan(x).rewrite(csc) == csc(-x + pi/2, evaluate=False)/csc(x) assert tan(sin(x)).rewrite(Pow) == tan(sin(x)) assert tan(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == sqrt(sqrt(5)/8 + Rational(5, 8))/(Rational(-1, 4) + sqrt(5)/4) def test_tan_subs(): assert tan(x).subs(tan(x), y) == y assert tan(x).subs(x, y) == tan(y) assert tan(x).subs(x, S.Pi/2) is zoo assert tan(x).subs(x, S.Pi*Rational(3, 2)) is zoo def test_tan_expansion(): assert tan(x + y).expand(trig=True) == ((tan(x) + tan(y))/(1 - tan(x)*tan(y))).expand() assert tan(x - y).expand(trig=True) == ((tan(x) - tan(y))/(1 + tan(x)*tan(y))).expand() assert tan(x + y + z).expand(trig=True) == ( (tan(x) + tan(y) + tan(z) - tan(x)*tan(y)*tan(z))/ (1 - tan(x)*tan(y) - tan(x)*tan(z) - tan(y)*tan(z))).expand() assert 0 == tan(2*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 7))])*24 - 7 assert 0 == tan(3*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*55 - 37 assert 0 == tan(4*x - pi/4).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*239 - 1 def test_tan_AccumBounds(): assert tan(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo) assert tan(AccumBounds(S.Pi/3, S.Pi*Rational(2, 3))) == AccumBounds(-oo, oo) assert tan(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(tan(S.Pi/6), tan(S.Pi/3)) def test_tan_fdiff(): assert tan(x).fdiff() == tan(x)**2 + 1 raises(ArgumentIndexError, lambda: tan(x).fdiff(2)) def test_cot(): assert cot(nan) is nan assert cot.nargs == FiniteSet(1) 
assert cot(oo*I) == -I assert cot(-oo*I) == I assert cot(zoo) is nan assert cot(0) is zoo assert cot(2*pi) is zoo assert cot(acot(x)) == x assert cot(atan(x)) == 1 / x assert cot(asin(x)) == sqrt(1 - x**2) / x assert cot(acos(x)) == x / sqrt(1 - x**2) assert cot(acsc(x)) == sqrt(1 - 1 / x**2) * x assert cot(asec(x)) == 1 / (sqrt(1 - 1 / x**2) * x) assert cot(atan2(y, x)) == x/y assert cot(pi*I) == -coth(pi)*I assert cot(-pi*I) == coth(pi)*I assert cot(-2*I) == coth(2)*I assert cot(pi) == cot(2*pi) == cot(3*pi) assert cot(-pi) == cot(-2*pi) == cot(-3*pi) assert cot(pi/2) == 0 assert cot(-pi/2) == 0 assert cot(pi*Rational(5, 2)) == 0 assert cot(pi*Rational(7, 2)) == 0 assert cot(pi/3) == 1/sqrt(3) assert cot(pi*Rational(-2, 3)) == 1/sqrt(3) assert cot(pi/4) is S.One assert cot(-pi/4) is S.NegativeOne assert cot(pi*Rational(17, 4)) is S.One assert cot(pi*Rational(-3, 4)) is S.One assert cot(pi/6) == sqrt(3) assert cot(-pi/6) == -sqrt(3) assert cot(pi*Rational(7, 6)) == sqrt(3) assert cot(pi*Rational(-5, 6)) == sqrt(3) assert cot(pi/8) == 1 + sqrt(2) assert cot(pi*Rational(3, 8)) == -1 + sqrt(2) assert cot(pi*Rational(5, 8)) == 1 - sqrt(2) assert cot(pi*Rational(7, 8)) == -1 - sqrt(2) assert cot(pi/12) == sqrt(3) + 2 assert cot(pi*Rational(5, 12)) == -sqrt(3) + 2 assert cot(pi*Rational(7, 12)) == sqrt(3) - 2 assert cot(pi*Rational(11, 12)) == -sqrt(3) - 2 assert cot(pi/24).radsimp() == sqrt(2) + sqrt(3) + 2 + sqrt(6) assert cot(pi*Rational(5, 24)).radsimp() == -sqrt(2) - sqrt(3) + 2 + sqrt(6) assert cot(pi*Rational(7, 24)).radsimp() == -sqrt(2) + sqrt(3) - 2 + sqrt(6) assert cot(pi*Rational(11, 24)).radsimp() == sqrt(2) - sqrt(3) - 2 + sqrt(6) assert cot(pi*Rational(13, 24)).radsimp() == -sqrt(2) + sqrt(3) + 2 - sqrt(6) assert cot(pi*Rational(17, 24)).radsimp() == sqrt(2) - sqrt(3) + 2 - sqrt(6) assert cot(pi*Rational(19, 24)).radsimp() == sqrt(2) + sqrt(3) - 2 - sqrt(6) assert cot(pi*Rational(23, 24)).radsimp() == -sqrt(2) - sqrt(3) - 2 - sqrt(6) assert cot(x*I) == -coth(x)*I assert cot(k*pi*I) == -coth(k*pi)*I assert cot(r).is_real is None assert cot(r).is_extended_real is True assert cot(a).is_algebraic is None assert cot(na).is_algebraic is False assert cot(pi*Rational(10, 7)) == cot(pi*Rational(3, 7)) assert cot(pi*Rational(11, 7)) == -cot(pi*Rational(3, 7)) assert cot(pi*Rational(-11, 7)) == cot(pi*Rational(3, 7)) assert cot(pi*Rational(39, 34)) == cot(pi*Rational(5, 34)) assert cot(pi*Rational(-41, 34)) == -cot(pi*Rational(7, 34)) assert cot(x).is_finite is None assert cot(r).is_finite is None i = Symbol('i', imaginary=True) assert cot(i).is_finite is True assert cot(x).subs(x, 3*pi) is zoo def test_tan_cot_sin_cos_evalf(): assert abs((tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15)) - 1).evalf()) < 1e-14 assert abs((cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15)) - 1).evalf()) < 1e-14 @XFAIL def test_tan_cot_sin_cos_ratsimp(): assert 1 == (tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15))).ratsimp() assert 1 == (cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15))).ratsimp() def test_cot_series(): assert cot(x).series(x, 0, 9) == \ 1/x - x/3 - x**3/45 - 2*x**5/945 - x**7/4725 + O(x**9) # issue 6210 assert cot(x**4 + x**5).series(x, 0, 1) == \ x**(-4) - 1/x**3 + x**(-2) - 1/x + 1 + O(x) assert cot(pi*(1-x)).series(x, 0, 3) == -1/(pi*x) + pi*x/3 + O(x**3) assert cot(x).taylor_term(0, x) == 1/x assert cot(x).taylor_term(2, x) is S.Zero assert cot(x).taylor_term(3, x) == -x**3/45 def test_cot_rewrite(): 
neg_exp, pos_exp = exp(-x*I), exp(x*I) assert cot(x).rewrite(exp) == I*(pos_exp + neg_exp)/(pos_exp - neg_exp) assert cot(x).rewrite(sin) == sin(2*x)/(2*(sin(x)**2)) assert cot(x).rewrite(cos) == cos(x)/cos(x - pi/2, evaluate=False) assert cot(x).rewrite(tan) == 1/tan(x) assert cot(sinh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sinh(3)).n() assert cot(cosh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, cosh(3)).n() assert cot(tanh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tanh(3)).n() assert cot(coth(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, coth(3)).n() assert cot(sin(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sin(3)).n() assert cot(tan(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tan(3)).n() assert cot(log(x)).rewrite(Pow) == -I*(x**-I + x**I)/(x**-I - x**I) assert cot(pi*Rational(4, 34)).rewrite(pow).ratsimp() == (cos(pi*Rational(4, 34))/sin(pi*Rational(4, 34))).rewrite(pow).ratsimp() assert cot(pi*Rational(4, 17)).rewrite(pow) == (cos(pi*Rational(4, 17))/sin(pi*Rational(4, 17))).rewrite(pow) assert cot(pi/19).rewrite(pow) == cot(pi/19) assert cot(pi/19).rewrite(sqrt) == cot(pi/19) assert cot(x).rewrite(sec) == sec(x - pi / 2, evaluate=False) / sec(x) assert cot(x).rewrite(csc) == csc(x) / csc(- x + pi / 2, evaluate=False) assert cot(sin(x)).rewrite(Pow) == cot(sin(x)) assert cot(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == (Rational(-1, 4) + sqrt(5)/4)/\ sqrt(sqrt(5)/8 + Rational(5, 8)) def test_cot_subs(): assert cot(x).subs(cot(x), y) == y assert cot(x).subs(x, y) == cot(y) assert cot(x).subs(x, 0) is zoo assert cot(x).subs(x, S.Pi) is zoo def test_cot_expansion(): assert cot(x + y).expand(trig=True) == ((cot(x)*cot(y) - 1)/(cot(x) + cot(y))).expand() assert cot(x - y).expand(trig=True) == (-(cot(x)*cot(y) + 1)/(cot(x) - cot(y))).expand() assert cot(x + y + z).expand(trig=True) == ( (cot(x)*cot(y)*cot(z) - cot(x) - cot(y) - cot(z))/ (-1 + cot(x)*cot(y) + cot(x)*cot(z) + cot(y)*cot(z))).expand() assert cot(3*x).expand(trig=True) == ((cot(x)**3 - 3*cot(x))/(3*cot(x)**2 - 1)).expand() assert 0 == cot(2*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 3))])*3 + 4 assert 0 == cot(3*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 5))])*55 - 37 assert 0 == cot(4*x - pi/4).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 7))])*863 + 191 def test_cot_AccumBounds(): assert cot(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo) assert cot(AccumBounds(-S.Pi/3, S.Pi/3)) == AccumBounds(-oo, oo) assert cot(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(cot(S.Pi/3), cot(S.Pi/6)) def test_cot_fdiff(): assert cot(x).fdiff() == -cot(x)**2 - 1 raises(ArgumentIndexError, lambda: cot(x).fdiff(2)) def test_sinc(): assert isinstance(sinc(x), sinc) s = Symbol('s', zero=True) assert sinc(s) is S.One assert sinc(S.Infinity) is S.Zero assert sinc(S.NegativeInfinity) is S.Zero assert sinc(S.NaN) is S.NaN assert sinc(S.ComplexInfinity) is S.NaN n = Symbol('n', integer=True, nonzero=True) assert sinc(n*pi) is S.Zero assert sinc(-n*pi) is S.Zero assert sinc(pi/2) == 2 / pi assert sinc(-pi/2) == 2 / pi assert sinc(pi*Rational(5, 2)) == 2 / (5*pi) assert sinc(pi*Rational(7, 2)) == -2 / (7*pi) assert sinc(-x) == sinc(x) assert sinc(x).diff() == Piecewise(((x*cos(x) - sin(x)) / x**2, Ne(x, 0)), (0, True)) assert sinc(x).diff(x).equals(sinc(x).rewrite(sin).diff(x)) assert sinc(x).diff().subs(x, 0) is S.Zero assert sinc(x).series() == 1 - x**2/6 + x**4/120 + O(x**6) 
assert sinc(x).rewrite(jn) == jn(0, x) assert sinc(x).rewrite(sin) == Piecewise((sin(x)/x, Ne(x, 0)), (1, True)) def test_asin(): assert asin(nan) is nan assert asin.nargs == FiniteSet(1) assert asin(oo) == -I*oo assert asin(-oo) == I*oo assert asin(zoo) is zoo # Note: asin(-x) = - asin(x) assert asin(0) == 0 assert asin(1) == pi/2 assert asin(-1) == -pi/2 assert asin(sqrt(3)/2) == pi/3 assert asin(-sqrt(3)/2) == -pi/3 assert asin(sqrt(2)/2) == pi/4 assert asin(-sqrt(2)/2) == -pi/4 assert asin(sqrt((5 - sqrt(5))/8)) == pi/5 assert asin(-sqrt((5 - sqrt(5))/8)) == -pi/5 assert asin(S.Half) == pi/6 assert asin(Rational(-1, 2)) == -pi/6 assert asin((sqrt(2 - sqrt(2)))/2) == pi/8 assert asin(-(sqrt(2 - sqrt(2)))/2) == -pi/8 assert asin((sqrt(5) - 1)/4) == pi/10 assert asin(-(sqrt(5) - 1)/4) == -pi/10 assert asin((sqrt(3) - 1)/sqrt(2**3)) == pi/12 assert asin(-(sqrt(3) - 1)/sqrt(2**3)) == -pi/12 # check round-trip for exact values: for d in [5, 6, 8, 10, 12]: for n in range(-(d//2), d//2 + 1): if gcd(n, d) == 1: assert asin(sin(n*pi/d)) == n*pi/d assert asin(x).diff(x) == 1/sqrt(1 - x**2) assert asin(0.2).is_real is True assert asin(-2).is_real is False assert asin(r).is_real is None assert asin(-2*I) == -I*asinh(2) assert asin(Rational(1, 7), evaluate=False).is_positive is True assert asin(Rational(-1, 7), evaluate=False).is_positive is False assert asin(p).is_positive is None assert asin(sin(Rational(7, 2))) == Rational(-7, 2) + pi assert asin(sin(Rational(-7, 4))) == Rational(7, 4) - pi assert unchanged(asin, cos(x)) def test_asin_series(): assert asin(x).series(x, 0, 9) == \ x + x**3/6 + 3*x**5/40 + 5*x**7/112 + O(x**9) t5 = asin(x).taylor_term(5, x) assert t5 == 3*x**5/40 assert asin(x).taylor_term(7, x, t5, 0) == 5*x**7/112 def test_asin_rewrite(): assert asin(x).rewrite(log) == -I*log(I*x + sqrt(1 - x**2)) assert asin(x).rewrite(atan) == 2*atan(x/(1 + sqrt(1 - x**2))) assert asin(x).rewrite(acos) == S.Pi/2 - acos(x) assert asin(x).rewrite(acot) == 2*acot((sqrt(-x**2 + 1) + 1)/x) assert asin(x).rewrite(asec) == -asec(1/x) + pi/2 assert asin(x).rewrite(acsc) == acsc(1/x) def test_asin_fdiff(): assert asin(x).fdiff() == 1/sqrt(1 - x**2) raises(ArgumentIndexError, lambda: asin(x).fdiff(2)) def test_acos(): assert acos(nan) is nan assert acos(zoo) is zoo assert acos.nargs == FiniteSet(1) assert acos(oo) == I*oo assert acos(-oo) == -I*oo # Note: acos(-x) = pi - acos(x) assert acos(0) == pi/2 assert acos(S.Half) == pi/3 assert acos(Rational(-1, 2)) == pi*Rational(2, 3) assert acos(1) == 0 assert acos(-1) == pi assert acos(sqrt(2)/2) == pi/4 assert acos(-sqrt(2)/2) == pi*Rational(3, 4) # check round-trip for exact values: for d in [5, 6, 8, 10, 12]: for num in range(d): if gcd(num, d) == 1: assert acos(cos(num*pi/d)) == num*pi/d assert acos(2*I) == pi/2 - asin(2*I) assert acos(x).diff(x) == -1/sqrt(1 - x**2) assert acos(0.2).is_real is True assert acos(-2).is_real is False assert acos(r).is_real is None assert acos(Rational(1, 7), evaluate=False).is_positive is True assert acos(Rational(-1, 7), evaluate=False).is_positive is True assert acos(Rational(3, 2), evaluate=False).is_positive is False assert acos(p).is_positive is None assert acos(2 + p).conjugate() != acos(10 + p) assert acos(-3 + n).conjugate() != acos(-3 + n) assert acos(Rational(1, 3)).conjugate() == acos(Rational(1, 3)) assert acos(Rational(-1, 3)).conjugate() == acos(Rational(-1, 3)) assert acos(p + n*I).conjugate() == acos(p - n*I) assert acos(z).conjugate() != acos(conjugate(z)) def test_acos_series(): assert acos(x).series(x, 0, 
8) == \ pi/2 - x - x**3/6 - 3*x**5/40 - 5*x**7/112 + O(x**8) assert acos(x).series(x, 0, 8) == pi/2 - asin(x).series(x, 0, 8) t5 = acos(x).taylor_term(5, x) assert t5 == -3*x**5/40 assert acos(x).taylor_term(7, x, t5, 0) == -5*x**7/112 assert acos(x).taylor_term(0, x) == pi/2 assert acos(x).taylor_term(2, x) is S.Zero def test_acos_rewrite(): assert acos(x).rewrite(log) == pi/2 + I*log(I*x + sqrt(1 - x**2)) assert acos(x).rewrite(atan) == \ atan(sqrt(1 - x**2)/x) + (pi/2)*(1 - x*sqrt(1/x**2)) assert acos(0).rewrite(atan) == S.Pi/2 assert acos(0.5).rewrite(atan) == acos(0.5).rewrite(log) assert acos(x).rewrite(asin) == S.Pi/2 - asin(x) assert acos(x).rewrite(acot) == -2*acot((sqrt(-x**2 + 1) + 1)/x) + pi/2 assert acos(x).rewrite(asec) == asec(1/x) assert acos(x).rewrite(acsc) == -acsc(1/x) + pi/2 def test_acos_fdiff(): assert acos(x).fdiff() == -1/sqrt(1 - x**2) raises(ArgumentIndexError, lambda: acos(x).fdiff(2)) def test_atan(): assert atan(nan) is nan assert atan.nargs == FiniteSet(1) assert atan(oo) == pi/2 assert atan(-oo) == -pi/2 assert atan(zoo) == AccumBounds(-pi/2, pi/2) assert atan(0) == 0 assert atan(1) == pi/4 assert atan(sqrt(3)) == pi/3 assert atan(-(1 + sqrt(2))) == pi*Rational(-3, 8) assert atan(sqrt((5 - 2 * sqrt(5)))) == pi/5 assert atan(-sqrt(1 - 2 * sqrt(5)/ 5)) == -pi/10 assert atan(sqrt(1 + 2 * sqrt(5) / 5)) == pi*Rational(3, 10) assert atan(-2 + sqrt(3)) == -pi/12 assert atan(2 + sqrt(3)) == pi*Rational(5, 12) assert atan(-2 - sqrt(3)) == pi*Rational(-5, 12) # check round-trip for exact values: for d in [5, 6, 8, 10, 12]: for num in range(-(d//2), d//2 + 1): if gcd(num, d) == 1: assert atan(tan(num*pi/d)) == num*pi/d assert atan(oo) == pi/2 assert atan(x).diff(x) == 1/(1 + x**2) assert atan(r).is_real is True assert atan(-2*I) == -I*atanh(2) assert unchanged(atan, cot(x)) assert atan(cot(Rational(1, 4))) == Rational(-1, 4) + pi/2 assert acot(Rational(1, 4)).is_rational is False for s in (x, p, n, np, nn, nz, ep, en, enp, enn, enz): if s.is_real or s.is_extended_real is None: assert s.is_nonzero is atan(s).is_nonzero assert s.is_positive is atan(s).is_positive assert s.is_negative is atan(s).is_negative assert s.is_nonpositive is atan(s).is_nonpositive assert s.is_nonnegative is atan(s).is_nonnegative else: assert s.is_extended_nonzero is atan(s).is_nonzero assert s.is_extended_positive is atan(s).is_positive assert s.is_extended_negative is atan(s).is_negative assert s.is_extended_nonpositive is atan(s).is_nonpositive assert s.is_extended_nonnegative is atan(s).is_nonnegative assert s.is_extended_nonzero is atan(s).is_extended_nonzero assert s.is_extended_positive is atan(s).is_extended_positive assert s.is_extended_negative is atan(s).is_extended_negative assert s.is_extended_nonpositive is atan(s).is_extended_nonpositive assert s.is_extended_nonnegative is atan(s).is_extended_nonnegative def test_atan_rewrite(): assert atan(x).rewrite(log) == I*(log(1 - I*x)-log(1 + I*x))/2 assert atan(x).rewrite(asin) == (-asin(1/sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x assert atan(x).rewrite(acos) == sqrt(x**2)*acos(1/sqrt(x**2 + 1))/x assert atan(x).rewrite(acot) == acot(1/x) assert atan(x).rewrite(asec) == sqrt(x**2)*asec(sqrt(x**2 + 1))/x assert atan(x).rewrite(acsc) == (-acsc(sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x assert atan(-5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:-5*I}) assert atan(5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:5*I}) def test_atan_fdiff(): assert atan(x).fdiff() == 1/(x**2 + 1) raises(ArgumentIndexError, lambda: atan(x).fdiff(2)) def test_atan2(): 
assert atan2.nargs == FiniteSet(2) assert atan2(0, 0) is S.NaN assert atan2(0, 1) == 0 assert atan2(1, 1) == pi/4 assert atan2(1, 0) == pi/2 assert atan2(1, -1) == pi*Rational(3, 4) assert atan2(0, -1) == pi assert atan2(-1, -1) == pi*Rational(-3, 4) assert atan2(-1, 0) == -pi/2 assert atan2(-1, 1) == -pi/4 i = symbols('i', imaginary=True) r = symbols('r', real=True) eq = atan2(r, i) ans = -I*log((i + I*r)/sqrt(i**2 + r**2)) reps = ((r, 2), (i, I)) assert eq.subs(reps) == ans.subs(reps) x = Symbol('x', negative=True) y = Symbol('y', negative=True) assert atan2(y, x) == atan(y/x) - pi y = Symbol('y', nonnegative=True) assert atan2(y, x) == atan(y/x) + pi y = Symbol('y') assert atan2(y, x) == atan2(y, x, evaluate=False) u = Symbol("u", positive=True) assert atan2(0, u) == 0 u = Symbol("u", negative=True) assert atan2(0, u) == pi assert atan2(y, oo) == 0 assert atan2(y, -oo)== 2*pi*Heaviside(re(y)) - pi assert atan2(y, x).rewrite(log) == -I*log((x + I*y)/sqrt(x**2 + y**2)) assert atan2(0, 0) is S.NaN ex = atan2(y, x) - arg(x + I*y) assert ex.subs({x:2, y:3}).rewrite(arg) == 0 assert ex.subs({x:2, y:3*I}).rewrite(arg) == -pi - I*log(sqrt(5)*I/5) assert ex.subs({x:2*I, y:3}).rewrite(arg) == -pi/2 - I*log(sqrt(5)*I) assert ex.subs({x:2*I, y:3*I}).rewrite(arg) == -pi + atan(Rational(2, 3)) + atan(Rational(3, 2)) i = symbols('i', imaginary=True) r = symbols('r', real=True) e = atan2(i, r) rewrite = e.rewrite(arg) reps = {i: I, r: -2} assert rewrite == -I*log(abs(I*i + r)/sqrt(abs(i**2 + r**2))) + arg((I*i + r)/sqrt(i**2 + r**2)) assert (e - rewrite).subs(reps).equals(0) assert atan2(0, x).rewrite(atan) == Piecewise((pi, re(x) < 0), (0, Ne(x, 0)), (nan, True)) assert atan2(0, r).rewrite(atan) == Piecewise((pi, r < 0), (0, Ne(r, 0)), (S.NaN, True)) assert atan2(0, i),rewrite(atan) == 0 assert atan2(0, r + i).rewrite(atan) == Piecewise((pi, r < 0), (0, True)) assert atan2(y, x).rewrite(atan) == Piecewise( (2*atan(y/(x + sqrt(x**2 + y**2))), Ne(y, 0)), (pi, re(x) < 0), (0, (re(x) > 0) | Ne(im(x), 0)), (nan, True)) assert conjugate(atan2(x, y)) == atan2(conjugate(x), conjugate(y)) assert diff(atan2(y, x), x) == -y/(x**2 + y**2) assert diff(atan2(y, x), y) == x/(x**2 + y**2) assert simplify(diff(atan2(y, x).rewrite(log), x)) == -y/(x**2 + y**2) assert simplify(diff(atan2(y, x).rewrite(log), y)) == x/(x**2 + y**2) assert str(atan2(1, 2).evalf(5)) == '0.46365' raises(ArgumentIndexError, lambda: atan2(x, y).fdiff(3)) def test_issue_17461(): class A(Symbol): is_extended_real = True def _eval_evalf(self, prec): return Float(5.0) x = A('X') y = A('Y') assert abs(atan2(x, y).evalf() - 0.785398163397448) <= 1e-10 def test_acot(): assert acot(nan) is nan assert acot.nargs == FiniteSet(1) assert acot(-oo) == 0 assert acot(oo) == 0 assert acot(zoo) == 0 assert acot(1) == pi/4 assert acot(0) == pi/2 assert acot(sqrt(3)/3) == pi/3 assert acot(1/sqrt(3)) == pi/3 assert acot(-1/sqrt(3)) == -pi/3 assert acot(x).diff(x) == -1/(1 + x**2) assert acot(r).is_extended_real is True assert acot(I*pi) == -I*acoth(pi) assert acot(-2*I) == I*acoth(2) assert acot(x).is_positive is None assert acot(n).is_positive is False assert acot(p).is_positive is True assert acot(I).is_positive is False assert acot(Rational(1, 4)).is_rational is False assert unchanged(acot, cot(x)) assert unchanged(acot, tan(x)) assert acot(cot(Rational(1, 4))) == Rational(1, 4) assert acot(tan(Rational(-1, 4))) == Rational(1, 4) - pi/2 def test_acot_rewrite(): assert acot(x).rewrite(log) == I*(log(1 - I/x)-log(1 + I/x))/2 assert acot(x).rewrite(asin) == 
x*(-asin(sqrt(-x**2)/sqrt(-x**2 - 1)) + pi/2)*sqrt(x**(-2)) assert acot(x).rewrite(acos) == x*sqrt(x**(-2))*acos(sqrt(-x**2)/sqrt(-x**2 - 1)) assert acot(x).rewrite(atan) == atan(1/x) assert acot(x).rewrite(asec) == x*sqrt(x**(-2))*asec(sqrt((x**2 + 1)/x**2)) assert acot(x).rewrite(acsc) == x*(-acsc(sqrt((x**2 + 1)/x**2)) + pi/2)*sqrt(x**(-2)) assert acot(-I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:-I/5}) assert acot(I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:I/5}) def test_acot_fdiff(): assert acot(x).fdiff() == -1/(x**2 + 1) raises(ArgumentIndexError, lambda: acot(x).fdiff(2)) def test_attributes(): assert sin(x).args == (x,) def test_sincos_rewrite(): assert sin(pi/2 - x) == cos(x) assert sin(pi - x) == sin(x) assert cos(pi/2 - x) == sin(x) assert cos(pi - x) == -cos(x) def _check_even_rewrite(func, arg): """Checks that the expr has been rewritten using f(-x) -> f(x) arg : -x """ return func(arg).args[0] == -arg def _check_odd_rewrite(func, arg): """Checks that the expr has been rewritten using f(-x) -> -f(x) arg : -x """ return func(arg).func.is_Mul def _check_no_rewrite(func, arg): """Checks that the expr is not rewritten""" return func(arg).args[0] == arg def test_evenodd_rewrite(): a = cos(2) # negative b = sin(1) # positive even = [cos] odd = [sin, tan, cot, asin, atan, acot] with_minus = [-1, -2**1024 * E, -pi/105, -x*y, -x - y] for func in even: for expr in with_minus: assert _check_even_rewrite(func, expr) assert _check_no_rewrite(func, a*b) assert func( x - y) == func(y - x) # it doesn't matter which form is canonical for func in odd: for expr in with_minus: assert _check_odd_rewrite(func, expr) assert _check_no_rewrite(func, a*b) assert func( x - y) == -func(y - x) # it doesn't matter which form is canonical def test_issue_4547(): assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2) assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2) assert tan(x).rewrite(cot) == 1/cot(x) assert cot(x).fdiff() == -1 - cot(x)**2 def test_as_leading_term_issue_5272(): assert sin(x).as_leading_term(x) == x assert cos(x).as_leading_term(x) == 1 assert tan(x).as_leading_term(x) == x assert cot(x).as_leading_term(x) == 1/x assert asin(x).as_leading_term(x) == x assert acos(x).as_leading_term(x) == x assert atan(x).as_leading_term(x) == x assert acot(x).as_leading_term(x) == x def test_leading_terms(): for func in [sin, cos, tan, cot, asin, acos, atan, acot]: for arg in (1/x, S.Half): eq = func(arg) assert eq.as_leading_term(x) == eq def test_atan2_expansion(): assert cancel(atan2(x**2, x + 1).diff(x) - atan(x**2/(x + 1)).diff(x)) == 0 assert cancel(atan(y/x).series(y, 0, 5) - atan2(y, x).series(y, 0, 5) + atan2(0, x) - atan(0)) == O(y**5) assert cancel(atan(y/x).series(x, 1, 4) - atan2(y, x).series(x, 1, 4) + atan2(y, 1) - atan(y)) == O((x - 1)**4, (x, 1)) assert cancel(atan((y + x)/x).series(x, 1, 3) - atan2(y + x, x).series(x, 1, 3) + atan2(1 + y, 1) - atan(1 + y)) == O((x - 1)**3, (x, 1)) assert Matrix([atan2(y, x)]).jacobian([y, x]) == \ Matrix([[x/(y**2 + x**2), -y/(y**2 + x**2)]]) def test_aseries(): def t(n, v, d, e): assert abs( n(1/v).evalf() - n(1/x).series(x, dir=d).removeO().subs(x, v)) < e t(atan, 0.1, '+', 1e-5) t(atan, -0.1, '-', 1e-5) t(acot, 0.1, '+', 1e-5) t(acot, -0.1, '-', 1e-5) def test_issue_4420(): i = Symbol('i', integer=True) e = Symbol('e', even=True) o = Symbol('o', odd=True) # unknown parity for variable assert cos(4*i*pi) == 1 assert sin(4*i*pi) == 0 assert tan(4*i*pi) == 0 assert cot(4*i*pi) is zoo assert cos(3*i*pi) == cos(pi*i) # 
+/-1 assert sin(3*i*pi) == 0 assert tan(3*i*pi) == 0 assert cot(3*i*pi) is zoo assert cos(4.0*i*pi) == 1 assert sin(4.0*i*pi) == 0 assert tan(4.0*i*pi) == 0 assert cot(4.0*i*pi) is zoo assert cos(3.0*i*pi) == cos(pi*i) # +/-1 assert sin(3.0*i*pi) == 0 assert tan(3.0*i*pi) == 0 assert cot(3.0*i*pi) is zoo assert cos(4.5*i*pi) == cos(0.5*pi*i) assert sin(4.5*i*pi) == sin(0.5*pi*i) assert tan(4.5*i*pi) == tan(0.5*pi*i) assert cot(4.5*i*pi) == cot(0.5*pi*i) # parity of variable is known assert cos(4*e*pi) == 1 assert sin(4*e*pi) == 0 assert tan(4*e*pi) == 0 assert cot(4*e*pi) is zoo assert cos(3*e*pi) == 1 assert sin(3*e*pi) == 0 assert tan(3*e*pi) == 0 assert cot(3*e*pi) is zoo assert cos(4.0*e*pi) == 1 assert sin(4.0*e*pi) == 0 assert tan(4.0*e*pi) == 0 assert cot(4.0*e*pi) is zoo assert cos(3.0*e*pi) == 1 assert sin(3.0*e*pi) == 0 assert tan(3.0*e*pi) == 0 assert cot(3.0*e*pi) is zoo assert cos(4.5*e*pi) == cos(0.5*pi*e) assert sin(4.5*e*pi) == sin(0.5*pi*e) assert tan(4.5*e*pi) == tan(0.5*pi*e) assert cot(4.5*e*pi) == cot(0.5*pi*e) assert cos(4*o*pi) == 1 assert sin(4*o*pi) == 0 assert tan(4*o*pi) == 0 assert cot(4*o*pi) is zoo assert cos(3*o*pi) == -1 assert sin(3*o*pi) == 0 assert tan(3*o*pi) == 0 assert cot(3*o*pi) is zoo assert cos(4.0*o*pi) == 1 assert sin(4.0*o*pi) == 0 assert tan(4.0*o*pi) == 0 assert cot(4.0*o*pi) is zoo assert cos(3.0*o*pi) == -1 assert sin(3.0*o*pi) == 0 assert tan(3.0*o*pi) == 0 assert cot(3.0*o*pi) is zoo assert cos(4.5*o*pi) == cos(0.5*pi*o) assert sin(4.5*o*pi) == sin(0.5*pi*o) assert tan(4.5*o*pi) == tan(0.5*pi*o) assert cot(4.5*o*pi) == cot(0.5*pi*o) # x could be imaginary assert cos(4*x*pi) == cos(4*pi*x) assert sin(4*x*pi) == sin(4*pi*x) assert tan(4*x*pi) == tan(4*pi*x) assert cot(4*x*pi) == cot(4*pi*x) assert cos(3*x*pi) == cos(3*pi*x) assert sin(3*x*pi) == sin(3*pi*x) assert tan(3*x*pi) == tan(3*pi*x) assert cot(3*x*pi) == cot(3*pi*x) assert cos(4.0*x*pi) == cos(4.0*pi*x) assert sin(4.0*x*pi) == sin(4.0*pi*x) assert tan(4.0*x*pi) == tan(4.0*pi*x) assert cot(4.0*x*pi) == cot(4.0*pi*x) assert cos(3.0*x*pi) == cos(3.0*pi*x) assert sin(3.0*x*pi) == sin(3.0*pi*x) assert tan(3.0*x*pi) == tan(3.0*pi*x) assert cot(3.0*x*pi) == cot(3.0*pi*x) assert cos(4.5*x*pi) == cos(4.5*pi*x) assert sin(4.5*x*pi) == sin(4.5*pi*x) assert tan(4.5*x*pi) == tan(4.5*pi*x) assert cot(4.5*x*pi) == cot(4.5*pi*x) def test_inverses(): raises(AttributeError, lambda: sin(x).inverse()) raises(AttributeError, lambda: cos(x).inverse()) assert tan(x).inverse() == atan assert cot(x).inverse() == acot raises(AttributeError, lambda: csc(x).inverse()) raises(AttributeError, lambda: sec(x).inverse()) assert asin(x).inverse() == sin assert acos(x).inverse() == cos assert atan(x).inverse() == tan assert acot(x).inverse() == cot def test_real_imag(): a, b = symbols('a b', real=True) z = a + b*I for deep in [True, False]: assert sin( z).as_real_imag(deep=deep) == (sin(a)*cosh(b), cos(a)*sinh(b)) assert cos( z).as_real_imag(deep=deep) == (cos(a)*cosh(b), -sin(a)*sinh(b)) assert tan(z).as_real_imag(deep=deep) == (sin(2*a)/(cos(2*a) + cosh(2*b)), sinh(2*b)/(cos(2*a) + cosh(2*b))) assert cot(z).as_real_imag(deep=deep) == (-sin(2*a)/(cos(2*a) - cosh(2*b)), -sinh(2*b)/(cos(2*a) - cosh(2*b))) assert sin(a).as_real_imag(deep=deep) == (sin(a), 0) assert cos(a).as_real_imag(deep=deep) == (cos(a), 0) assert tan(a).as_real_imag(deep=deep) == (tan(a), 0) assert cot(a).as_real_imag(deep=deep) == (cot(a), 0) @XFAIL def test_sin_cos_with_infinity(): # Test for issue 5196 # https://github.com/sympy/sympy/issues/5196 
assert sin(oo) is S.NaN assert cos(oo) is S.NaN @slow def test_sincos_rewrite_sqrt(): # equivalent to testing rewrite(pow) for p in [1, 3, 5, 17]: for t in [1, 8]: n = t*p # The vertices `exp(i*pi/n)` of a regular `n`-gon can # be expressed by means of nested square roots if and # only if `n` is a product of Fermat primes, `p`, and # powers of 2, `t'. The code aims to check all vertices # not belonging to an `m`-gon for `m < n`(`gcd(i, n) == 1`). # For large `n` this makes the test too slow, therefore # the vertices are limited to those of index `i < 10`. for i in range(1, min((n + 1)//2 + 1, 10)): if 1 == gcd(i, n): x = i*pi/n s1 = sin(x).rewrite(sqrt) c1 = cos(x).rewrite(sqrt) assert not s1.has(cos, sin), "fails for %d*pi/%d" % (i, n) assert not c1.has(cos, sin), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs(sin(x.evalf(5)) - s1.evalf(2)), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs(cos(x.evalf(5)) - c1.evalf(2)), "fails for %d*pi/%d" % (i, n) assert cos(pi/14).rewrite(sqrt) == sqrt(cos(pi/7)/2 + S.Half) assert cos(pi/257).rewrite(sqrt).evalf(64) == cos(pi/257).evalf(64) assert cos(pi*Rational(-15, 2)/11, evaluate=False).rewrite( sqrt) == -sqrt(-cos(pi*Rational(4, 11))/2 + S.Half) assert cos(Mul(2, pi, S.Half, evaluate=False), evaluate=False).rewrite( sqrt) == -1 e = cos(pi/3/17) # don't use pi/15 since that is caught at instantiation a = ( -3*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17) + 17)/64 - 3*sqrt(34)*sqrt(sqrt(17) + 17)/128 - sqrt(sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - sqrt(-sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 - Rational(1, 32) + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 + 3*sqrt(2)*sqrt(sqrt(17) + 17)/128 + sqrt(34)*sqrt(-sqrt(17) + 17)/128 + 13*sqrt(2)*sqrt(-sqrt(17) + 17)/128 + sqrt(17)*sqrt(-sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 + 5*sqrt(17)/32 + sqrt(3)*sqrt(-sqrt(2)*sqrt(sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/8 - 5*sqrt(2)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - 3*sqrt(2)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/32 + sqrt(34)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 + sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/2 + S.Half + 
sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + sqrt(34)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/32)/2) assert e.rewrite(sqrt) == a assert e.n() == a.n() # coverage of fermatCoords: multiplicity > 1; the following could be # different but that portion of the code should be tested in some way assert cos(pi/9/17).rewrite(sqrt) == \ sin(pi/9)*sin(pi*Rational(2, 17)) + cos(pi/9)*cos(pi*Rational(2, 17)) @slow def test_tancot_rewrite_sqrt(): # equivalent to testing rewrite(pow) for p in [1, 3, 5, 17]: for t in [1, 8]: n = t*p for i in range(1, min((n + 1)//2 + 1, 10)): if 1 == gcd(i, n): x = i*pi/n if 2*i != n and 3*i != 2*n: t1 = tan(x).rewrite(sqrt) assert not t1.has(cot, tan), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs( tan(x.evalf(7)) - t1.evalf(4) ), "fails for %d*pi/%d" % (i, n) if i != 0 and i != n: c1 = cot(x).rewrite(sqrt) assert not c1.has(cot, tan), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs( cot(x.evalf(7)) - c1.evalf(4) ), "fails for %d*pi/%d" % (i, n) def test_sec(): x = symbols('x', real=True) z = symbols('z') assert sec.nargs == FiniteSet(1) assert sec(zoo) is nan assert sec(0) == 1 assert sec(pi) == -1 assert sec(pi/2) is zoo assert sec(-pi/2) is zoo assert sec(pi/6) == 2*sqrt(3)/3 assert sec(pi/3) == 2 assert sec(pi*Rational(5, 2)) is zoo assert sec(pi*Rational(9, 7)) == -sec(pi*Rational(2, 7)) assert sec(pi*Rational(3, 4)) == -sqrt(2) # issue 8421 assert sec(I) == 1/cosh(1) assert sec(x*I) == 1/cosh(x) assert sec(-x) == sec(x) assert sec(asec(x)) == x assert sec(z).conjugate() == sec(conjugate(z)) assert (sec(z).as_real_imag() == (cos(re(z))*cosh(im(z))/(sin(re(z))**2*sinh(im(z))**2 + cos(re(z))**2*cosh(im(z))**2), sin(re(z))*sinh(im(z))/(sin(re(z))**2*sinh(im(z))**2 + cos(re(z))**2*cosh(im(z))**2))) assert sec(x).expand(trig=True) == 1/cos(x) assert sec(2*x).expand(trig=True) == 1/(2*cos(x)**2 - 1) assert sec(x).is_extended_real == True assert sec(z).is_real == None assert sec(a).is_algebraic is None assert sec(na).is_algebraic is False assert sec(x).as_leading_term() == sec(x) assert sec(0).is_finite == True assert sec(x).is_finite == None assert sec(pi/2).is_finite == False assert series(sec(x), x, x0=0, n=6) == 1 + x**2/2 + 5*x**4/24 + O(x**6) # https://github.com/sympy/sympy/issues/7166 assert series(sqrt(sec(x))) == 1 + x**2/4 + 7*x**4/96 + O(x**6) # https://github.com/sympy/sympy/issues/7167 assert (series(sqrt(sec(x)), x, x0=pi*3/2, n=4) == 1/sqrt(x - pi*Rational(3, 2)) + (x - pi*Rational(3, 2))**Rational(3, 2)/12 + (x - pi*Rational(3, 2))**Rational(7, 2)/160 + O((x - pi*Rational(3, 2))**4, (x, pi*Rational(3, 2)))) assert sec(x).diff(x) == tan(x)*sec(x) # Taylor Term checks assert sec(z).taylor_term(4, z) == 5*z**4/24 assert sec(z).taylor_term(6, z) == 61*z**6/720 assert sec(z).taylor_term(5, z) == 0 def test_sec_rewrite(): assert sec(x).rewrite(exp) == 1/(exp(I*x)/2 + exp(-I*x)/2) assert sec(x).rewrite(cos) == 1/cos(x) assert sec(x).rewrite(tan) == (tan(x/2)**2 + 1)/(-tan(x/2)**2 + 1) assert sec(x).rewrite(pow) == sec(x) assert sec(x).rewrite(sqrt) == sec(x) assert sec(z).rewrite(cot) 
== (cot(z/2)**2 + 1)/(cot(z/2)**2 - 1) assert sec(x).rewrite(sin) == 1 / sin(x + pi / 2, evaluate=False) assert sec(x).rewrite(tan) == (tan(x / 2)**2 + 1) / (-tan(x / 2)**2 + 1) assert sec(x).rewrite(csc) == csc(-x + pi/2, evaluate=False) def test_sec_fdiff(): assert sec(x).fdiff() == tan(x)*sec(x) raises(ArgumentIndexError, lambda: sec(x).fdiff(2)) def test_csc(): x = symbols('x', real=True) z = symbols('z') # https://github.com/sympy/sympy/issues/6707 cosecant = csc('x') alternate = 1/sin('x') assert cosecant.equals(alternate) == True assert alternate.equals(cosecant) == True assert csc.nargs == FiniteSet(1) assert csc(0) is zoo assert csc(pi) is zoo assert csc(zoo) is nan assert csc(pi/2) == 1 assert csc(-pi/2) == -1 assert csc(pi/6) == 2 assert csc(pi/3) == 2*sqrt(3)/3 assert csc(pi*Rational(5, 2)) == 1 assert csc(pi*Rational(9, 7)) == -csc(pi*Rational(2, 7)) assert csc(pi*Rational(3, 4)) == sqrt(2) # issue 8421 assert csc(I) == -I/sinh(1) assert csc(x*I) == -I/sinh(x) assert csc(-x) == -csc(x) assert csc(acsc(x)) == x assert csc(z).conjugate() == csc(conjugate(z)) assert (csc(z).as_real_imag() == (sin(re(z))*cosh(im(z))/(sin(re(z))**2*cosh(im(z))**2 + cos(re(z))**2*sinh(im(z))**2), -cos(re(z))*sinh(im(z))/(sin(re(z))**2*cosh(im(z))**2 + cos(re(z))**2*sinh(im(z))**2))) assert csc(x).expand(trig=True) == 1/sin(x) assert csc(2*x).expand(trig=True) == 1/(2*sin(x)*cos(x)) assert csc(x).is_extended_real == True assert csc(z).is_real == None assert csc(a).is_algebraic is None assert csc(na).is_algebraic is False assert csc(x).as_leading_term() == csc(x) assert csc(0).is_finite == False assert csc(x).is_finite == None assert csc(pi/2).is_finite == True assert series(csc(x), x, x0=pi/2, n=6) == \ 1 + (x - pi/2)**2/2 + 5*(x - pi/2)**4/24 + O((x - pi/2)**6, (x, pi/2)) assert series(csc(x), x, x0=0, n=6) == \ 1/x + x/6 + 7*x**3/360 + 31*x**5/15120 + O(x**6) assert csc(x).diff(x) == -cot(x)*csc(x) assert csc(x).taylor_term(2, x) == 0 assert csc(x).taylor_term(3, x) == 7*x**3/360 assert csc(x).taylor_term(5, x) == 31*x**5/15120 raises(ArgumentIndexError, lambda: csc(x).fdiff(2)) def test_asec(): z = Symbol('z', zero=True) assert asec(z) is zoo assert asec(nan) is nan assert asec(1) == 0 assert asec(-1) == pi assert asec(oo) == pi/2 assert asec(-oo) == pi/2 assert asec(zoo) == pi/2 assert asec(sec(pi*Rational(13, 4))) == pi*Rational(3, 4) assert asec(1 + sqrt(5)) == pi*Rational(2, 5) assert asec(2/sqrt(3)) == pi/6 assert asec(sqrt(4 - 2*sqrt(2))) == pi/8 assert asec(-sqrt(4 + 2*sqrt(2))) == pi*Rational(5, 8) assert asec(sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(3, 10) assert asec(-sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(7, 10) assert asec(sqrt(2) - sqrt(6)) == pi*Rational(11, 12) assert asec(x).diff(x) == 1/(x**2*sqrt(1 - 1/x**2)) assert asec(x).as_leading_term(x) == log(x) assert asec(x).rewrite(log) == I*log(sqrt(1 - 1/x**2) + I/x) + pi/2 assert asec(x).rewrite(asin) == -asin(1/x) + pi/2 assert asec(x).rewrite(acos) == acos(1/x) assert asec(x).rewrite(atan) == (2*atan(x + sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x assert asec(x).rewrite(acot) == (2*acot(x - sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x assert asec(x).rewrite(acsc) == -acsc(x) + pi/2 raises(ArgumentIndexError, lambda: asec(x).fdiff(2)) def test_asec_is_real(): assert asec(S.Half).is_real is False n = Symbol('n', positive=True, integer=True) assert asec(n).is_extended_real is True assert asec(x).is_real is None assert asec(r).is_real is None t = Symbol('t', real=False, finite=True) assert asec(t).is_real is False def test_acsc(): assert acsc(nan) is nan 
assert acsc(1) == pi/2 assert acsc(-1) == -pi/2 assert acsc(oo) == 0 assert acsc(-oo) == 0 assert acsc(zoo) == 0 assert acsc(0) is zoo assert acsc(csc(3)) == -3 + pi assert acsc(csc(4)) == -4 + pi assert acsc(csc(6)) == 6 - 2*pi assert unchanged(acsc, csc(x)) assert unchanged(acsc, sec(x)) assert acsc(2/sqrt(3)) == pi/3 assert acsc(csc(pi*Rational(13, 4))) == -pi/4 assert acsc(sqrt(2 + 2*sqrt(5)/5)) == pi/5 assert acsc(-sqrt(2 + 2*sqrt(5)/5)) == -pi/5 assert acsc(-2) == -pi/6 assert acsc(-sqrt(4 + 2*sqrt(2))) == -pi/8 assert acsc(sqrt(4 - 2*sqrt(2))) == pi*Rational(3, 8) assert acsc(1 + sqrt(5)) == pi/10 assert acsc(sqrt(2) - sqrt(6)) == pi*Rational(-5, 12) assert acsc(x).diff(x) == -1/(x**2*sqrt(1 - 1/x**2)) assert acsc(x).as_leading_term(x) == log(x) assert acsc(x).rewrite(log) == -I*log(sqrt(1 - 1/x**2) + I/x) assert acsc(x).rewrite(asin) == asin(1/x) assert acsc(x).rewrite(acos) == -acos(1/x) + pi/2 assert acsc(x).rewrite(atan) == (-atan(sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x assert acsc(x).rewrite(acot) == (-acot(1/sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x assert acsc(x).rewrite(asec) == -asec(x) + pi/2 raises(ArgumentIndexError, lambda: acsc(x).fdiff(2)) def test_csc_rewrite(): assert csc(x).rewrite(pow) == csc(x) assert csc(x).rewrite(sqrt) == csc(x) assert csc(x).rewrite(exp) == 2*I/(exp(I*x) - exp(-I*x)) assert csc(x).rewrite(sin) == 1/sin(x) assert csc(x).rewrite(tan) == (tan(x/2)**2 + 1)/(2*tan(x/2)) assert csc(x).rewrite(cot) == (cot(x/2)**2 + 1)/(2*cot(x/2)) assert csc(x).rewrite(cos) == 1/cos(x - pi/2, evaluate=False) assert csc(x).rewrite(sec) == sec(-x + pi/2, evaluate=False) # issue 17349 assert csc(1 - exp(-besselj(I, I))).rewrite(cos) == \ -1/cos(-pi/2 - 1 + cos(I*besselj(I, I)) + I*cos(-pi/2 + I*besselj(I, I), evaluate=False), evaluate=False) def test_issue_8653(): n = Symbol('n', integer=True) assert sin(n).is_irrational is None assert cos(n).is_irrational is None assert tan(n).is_irrational is None def test_issue_9157(): n = Symbol('n', integer=True, positive=True) assert atan(n - 1).is_nonnegative is True def test_trig_period(): x, y = symbols('x, y') assert sin(x).period() == 2*pi assert cos(x).period() == 2*pi assert tan(x).period() == pi assert cot(x).period() == pi assert sec(x).period() == 2*pi assert csc(x).period() == 2*pi assert sin(2*x).period() == pi assert cot(4*x - 6).period() == pi/4 assert cos((-3)*x).period() == pi*Rational(2, 3) assert cos(x*y).period(x) == 2*pi/abs(y) assert sin(3*x*y + 2*pi).period(y) == 2*pi/abs(3*x) assert tan(3*x).period(y) is S.Zero raises(NotImplementedError, lambda: sin(x**2).period(x)) def test_issue_7171(): assert sin(x).rewrite(sqrt) == sin(x) assert sin(x).rewrite(pow) == sin(x) def test_issue_11864(): w, k = symbols('w, k', real=True) F = Piecewise((1, Eq(2*pi*k, 0)), (sin(pi*k)/(pi*k), True)) soln = Piecewise((1, Eq(2*pi*k, 0)), (sinc(pi*k), True)) assert F.rewrite(sinc) == soln def test_real_assumptions(): z = Symbol('z', real=False, finite=True) assert sin(z).is_real is None assert cos(z).is_real is None assert tan(z).is_real is False assert sec(z).is_real is None assert csc(z).is_real is None assert cot(z).is_real is False assert asin(p).is_real is None assert asin(n).is_real is None assert asec(p).is_real is None assert asec(n).is_real is None assert acos(p).is_real is None assert acos(n).is_real is None assert acsc(p).is_real is None assert acsc(n).is_real is None assert atan(p).is_positive is True assert atan(n).is_negative is True assert acot(p).is_positive is True assert acot(n).is_negative is True def 
test_issue_14320(): assert asin(sin(2)) == -2 + pi and (-pi/2 <= -2 + pi <= pi/2) and sin(2) == sin(-2 + pi) assert asin(cos(2)) == -2 + pi/2 and (-pi/2 <= -2 + pi/2 <= pi/2) and cos(2) == sin(-2 + pi/2) assert acos(sin(2)) == -pi/2 + 2 and (0 <= -pi/2 + 2 <= pi) and sin(2) == cos(-pi/2 + 2) assert acos(cos(20)) == -6*pi + 20 and (0 <= -6*pi + 20 <= pi) and cos(20) == cos(-6*pi + 20) assert acos(cos(30)) == -30 + 10*pi and (0 <= -30 + 10*pi <= pi) and cos(30) == cos(-30 + 10*pi) assert atan(tan(17)) == -5*pi + 17 and (-pi/2 < -5*pi + 17 < pi/2) and tan(17) == tan(-5*pi + 17) assert atan(tan(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 < pi/2) and tan(15) == tan(-5*pi + 15) assert atan(cot(12)) == -12 + pi*Rational(7, 2) and (-pi/2 < -12 + pi*Rational(7, 2) < pi/2) and cot(12) == tan(-12 + pi*Rational(7, 2)) assert acot(cot(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 <= pi/2) and cot(15) == cot(-5*pi + 15) assert acot(tan(19)) == -19 + pi*Rational(13, 2) and (-pi/2 < -19 + pi*Rational(13, 2) <= pi/2) and tan(19) == cot(-19 + pi*Rational(13, 2)) assert asec(sec(11)) == -11 + 4*pi and (0 <= -11 + 4*pi <= pi) and cos(11) == cos(-11 + 4*pi) assert asec(csc(13)) == -13 + pi*Rational(9, 2) and (0 <= -13 + pi*Rational(9, 2) <= pi) and sin(13) == cos(-13 + pi*Rational(9, 2)) assert acsc(csc(14)) == -4*pi + 14 and (-pi/2 <= -4*pi + 14 <= pi/2) and sin(14) == sin(-4*pi + 14) assert acsc(sec(10)) == pi*Rational(-7, 2) + 10 and (-pi/2 <= pi*Rational(-7, 2) + 10 <= pi/2) and cos(10) == sin(pi*Rational(-7, 2) + 10) def test_issue_14543(): assert sec(2*pi + 11) == sec(11) assert sec(2*pi - 11) == sec(11) assert sec(pi + 11) == -sec(11) assert sec(pi - 11) == -sec(11) assert csc(2*pi + 17) == csc(17) assert csc(2*pi - 17) == -csc(17) assert csc(pi + 17) == -csc(17) assert csc(pi - 17) == csc(17) x = Symbol('x') assert csc(pi/2 + x) == sec(x) assert csc(pi/2 - x) == sec(x) assert csc(pi*Rational(3, 2) + x) == -sec(x) assert csc(pi*Rational(3, 2) - x) == -sec(x) assert sec(pi/2 - x) == csc(x) assert sec(pi/2 + x) == -csc(x) assert sec(pi*Rational(3, 2) + x) == csc(x) assert sec(pi*Rational(3, 2) - x) == -csc(x)
37.970991
144
0.555346
from sympy import (symbols, Symbol, nan, oo, zoo, I, sinh, sin, pi, atan, acos, Rational, sqrt, asin, acot, coth, E, S, tan, tanh, cos, cosh, atan2, exp, log, asinh, acoth, atanh, O, cancel, Matrix, re, im, Float, Pow, gcd, sec, csc, cot, diff, simplify, Heaviside, arg, conjugate, series, FiniteSet, asec, acsc, Mul, sinc, jn, AccumBounds, Interval, ImageSet, Lambda, besselj) from sympy.core.compatibility import range from sympy.core.expr import unchanged from sympy.core.function import ArgumentIndexError from sympy.core.relational import Ne, Eq from sympy.functions.elementary.piecewise import Piecewise from sympy.sets.setexpr import SetExpr from sympy.utilities.pytest import XFAIL, slow, raises x, y, z = symbols('x y z') r = Symbol('r', real=True) k = Symbol('k', integer=True) p = Symbol('p', positive=True) n = Symbol('n', negative=True) np = Symbol('p', nonpositive=True) nn = Symbol('n', nonnegative=True) nz = Symbol('nz', nonzero=True) ep = Symbol('ep', extended_positive=True) en = Symbol('en', extended_negative=True) enp = Symbol('ep', extended_nonpositive=True) enn = Symbol('en', extended_nonnegative=True) enz = Symbol('enz', extended_nonzero=True) a = Symbol('a', algebraic=True) na = Symbol('na', nonzero=True, algebraic=True) def test_sin(): x, y = symbols('x y') assert sin.nargs == FiniteSet(1) assert sin(nan) is nan assert sin(zoo) is nan assert sin(oo) == AccumBounds(-1, 1) assert sin(oo) - sin(oo) == AccumBounds(-2, 2) assert sin(oo*I) == oo*I assert sin(-oo*I) == -oo*I assert 0*sin(oo) is S.Zero assert 0/sin(oo) is S.Zero assert 0 + sin(oo) == AccumBounds(-1, 1) assert 5 + sin(oo) == AccumBounds(4, 6) assert sin(0) == 0 assert sin(asin(x)) == x assert sin(atan(x)) == x / sqrt(1 + x**2) assert sin(acos(x)) == sqrt(1 - x**2) assert sin(acot(x)) == 1 / (sqrt(1 + 1 / x**2) * x) assert sin(acsc(x)) == 1 / x assert sin(asec(x)) == sqrt(1 - 1 / x**2) assert sin(atan2(y, x)) == y / sqrt(x**2 + y**2) assert sin(pi*I) == sinh(pi)*I assert sin(-pi*I) == -sinh(pi)*I assert sin(-2*I) == -sinh(2)*I assert sin(pi) == 0 assert sin(-pi) == 0 assert sin(2*pi) == 0 assert sin(-2*pi) == 0 assert sin(-3*10**73*pi) == 0 assert sin(7*10**103*pi) == 0 assert sin(pi/2) == 1 assert sin(-pi/2) == -1 assert sin(pi*Rational(5, 2)) == 1 assert sin(pi*Rational(7, 2)) == -1 ne = symbols('ne', integer=True, even=False) e = symbols('e', even=True) assert sin(pi*ne/2) == (-1)**(ne/2 - S.Half) assert sin(pi*k/2).func == sin assert sin(pi*e/2) == 0 assert sin(pi*k) == 0 assert sin(pi*k).subs(k, 3) == sin(pi*k/2).subs(k, 6) assert sin(pi/3) == S.Half*sqrt(3) assert sin(pi*Rational(-2, 3)) == Rational(-1, 2)*sqrt(3) assert sin(pi/4) == S.Half*sqrt(2) assert sin(-pi/4) == Rational(-1, 2)*sqrt(2) assert sin(pi*Rational(17, 4)) == S.Half*sqrt(2) assert sin(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2) assert sin(pi/6) == S.Half assert sin(-pi/6) == Rational(-1, 2) assert sin(pi*Rational(7, 6)) == Rational(-1, 2) assert sin(pi*Rational(-5, 6)) == Rational(-1, 2) assert sin(pi*Rational(1, 5)) == sqrt((5 - sqrt(5)) / 8) assert sin(pi*Rational(2, 5)) == sqrt((5 + sqrt(5)) / 8) assert sin(pi*Rational(3, 5)) == sin(pi*Rational(2, 5)) assert sin(pi*Rational(4, 5)) == sin(pi*Rational(1, 5)) assert sin(pi*Rational(6, 5)) == -sin(pi*Rational(1, 5)) assert sin(pi*Rational(8, 5)) == -sin(pi*Rational(2, 5)) assert sin(pi*Rational(-1273, 5)) == -sin(pi*Rational(2, 5)) assert sin(pi/8) == sqrt((2 - sqrt(2))/4) assert sin(pi/10) == Rational(-1, 4) + sqrt(5)/4 assert sin(pi/12) == -sqrt(2)/4 + sqrt(6)/4 assert sin(pi*Rational(5, 12)) == 
sqrt(2)/4 + sqrt(6)/4 assert sin(pi*Rational(-7, 12)) == -sqrt(2)/4 - sqrt(6)/4 assert sin(pi*Rational(-11, 12)) == sqrt(2)/4 - sqrt(6)/4 assert sin(pi*Rational(104, 105)) == sin(pi/105) assert sin(pi*Rational(106, 105)) == -sin(pi/105) assert sin(pi*Rational(-104, 105)) == -sin(pi/105) assert sin(pi*Rational(-106, 105)) == sin(pi/105) assert sin(x*I) == sinh(x)*I assert sin(k*pi) == 0 assert sin(17*k*pi) == 0 assert sin(k*pi*I) == sinh(k*pi)*I assert sin(r).is_real is True assert sin(0, evaluate=False).is_algebraic assert sin(a).is_algebraic is None assert sin(na).is_algebraic is False q = Symbol('q', rational=True) assert sin(pi*q).is_algebraic qn = Symbol('qn', rational=True, nonzero=True) assert sin(qn).is_rational is False assert sin(q).is_rational is None assert isinstance(sin( re(x) - im(y)), sin) is True assert isinstance(sin(-re(x) + im(y)), sin) is False assert sin(SetExpr(Interval(0, 1))) == SetExpr(ImageSet(Lambda(x, sin(x)), Interval(0, 1))) for d in list(range(1, 22)) + [60, 85]: for n in range(0, d*2 + 1): x = n*pi/d e = abs( float(sin(x)) - sin(float(x)) ) assert e < 1e-12 def test_sin_cos(): for d in [1, 2, 3, 4, 5, 6, 10, 12, 15, 20, 24, 30, 40, 60, 120]: for n in range(-2*d, d*2): x = n*pi/d assert sin(x + pi/2) == cos(x), "fails for %d*pi/%d" % (n, d) assert sin(x - pi/2) == -cos(x), "fails for %d*pi/%d" % (n, d) assert sin(x) == cos(x - pi/2), "fails for %d*pi/%d" % (n, d) assert -sin(x) == cos(x + pi/2), "fails for %d*pi/%d" % (n, d) def test_sin_series(): assert sin(x).series(x, 0, 9) == \ x - x**3/6 + x**5/120 - x**7/5040 + O(x**9) def test_sin_rewrite(): assert sin(x).rewrite(exp) == -I*(exp(I*x) - exp(-I*x))/2 assert sin(x).rewrite(tan) == 2*tan(x/2)/(1 + tan(x/2)**2) assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2) assert sin(sinh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sinh(3)).n() assert sin(cosh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cosh(3)).n() assert sin(tanh(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tanh(3)).n() assert sin(coth(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, coth(3)).n() assert sin(sin(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, sin(3)).n() assert sin(cos(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cos(3)).n() assert sin(tan(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, tan(3)).n() assert sin(cot(x)).rewrite( exp).subs(x, 3).n() == sin(x).rewrite(exp).subs(x, cot(3)).n() assert sin(log(x)).rewrite(Pow) == I*x**-I / 2 - I*x**I /2 assert sin(x).rewrite(csc) == 1/csc(x) assert sin(x).rewrite(cos) == cos(x - pi / 2, evaluate=False) assert sin(x).rewrite(sec) == 1 / sec(x - pi / 2, evaluate=False) assert sin(cos(x)).rewrite(Pow) == sin(cos(x)) def test_sin_expansion(): assert sin(x + y).expand(trig=True) == sin(x)*cos(y) + cos(x)*sin(y) assert sin(x - y).expand(trig=True) == sin(x)*cos(y) - cos(x)*sin(y) assert sin(y - x).expand(trig=True) == cos(x)*sin(y) - sin(x)*cos(y) assert sin(2*x).expand(trig=True) == 2*sin(x)*cos(x) assert sin(3*x).expand(trig=True) == -4*sin(x)**3 + 3*sin(x) assert sin(4*x).expand(trig=True) == -8*sin(x)**3*cos(x) + 4*sin(x)*cos(x) assert sin(2).expand(trig=True) == 2*sin(1)*cos(1) assert sin(3).expand(trig=True) == -4*sin(1)**3 + 3*sin(1) def test_sin_AccumBounds(): assert sin(AccumBounds(-oo, oo)) == AccumBounds(-1, 1) assert sin(AccumBounds(0, oo)) == AccumBounds(-1, 1) assert sin(AccumBounds(-oo, 0)) == AccumBounds(-1, 1) assert sin(AccumBounds(0, 2*S.Pi)) == 
AccumBounds(-1, 1) assert sin(AccumBounds(0, S.Pi*Rational(3, 4))) == AccumBounds(0, 1) assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(7, 4))) == AccumBounds(-1, sin(S.Pi*Rational(3, 4))) assert sin(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(sin(S.Pi/4), sin(S.Pi/3)) assert sin(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 6))) == AccumBounds(sin(S.Pi*Rational(5, 6)), sin(S.Pi*Rational(3, 4))) def test_sin_fdiff(): assert sin(x).fdiff() == cos(x) raises(ArgumentIndexError, lambda: sin(x).fdiff(2)) def test_trig_symmetry(): assert sin(-x) == -sin(x) assert cos(-x) == cos(x) assert tan(-x) == -tan(x) assert cot(-x) == -cot(x) assert sin(x + pi) == -sin(x) assert sin(x + 2*pi) == sin(x) assert sin(x + 3*pi) == -sin(x) assert sin(x + 4*pi) == sin(x) assert sin(x - 5*pi) == -sin(x) assert cos(x + pi) == -cos(x) assert cos(x + 2*pi) == cos(x) assert cos(x + 3*pi) == -cos(x) assert cos(x + 4*pi) == cos(x) assert cos(x - 5*pi) == -cos(x) assert tan(x + pi) == tan(x) assert tan(x - 3*pi) == tan(x) assert cot(x + pi) == cot(x) assert cot(x - 3*pi) == cot(x) assert sin(pi/2 - x) == cos(x) assert sin(pi*Rational(3, 2) - x) == -cos(x) assert sin(pi*Rational(5, 2) - x) == cos(x) assert cos(pi/2 - x) == sin(x) assert cos(pi*Rational(3, 2) - x) == -sin(x) assert cos(pi*Rational(5, 2) - x) == sin(x) assert tan(pi/2 - x) == cot(x) assert tan(pi*Rational(3, 2) - x) == cot(x) assert tan(pi*Rational(5, 2) - x) == cot(x) assert cot(pi/2 - x) == tan(x) assert cot(pi*Rational(3, 2) - x) == tan(x) assert cot(pi*Rational(5, 2) - x) == tan(x) assert sin(pi/2 + x) == cos(x) assert cos(pi/2 + x) == -sin(x) assert tan(pi/2 + x) == -cot(x) assert cot(pi/2 + x) == -tan(x) def test_cos(): x, y = symbols('x y') assert cos.nargs == FiniteSet(1) assert cos(nan) is nan assert cos(oo) == AccumBounds(-1, 1) assert cos(oo) - cos(oo) == AccumBounds(-2, 2) assert cos(oo*I) is oo assert cos(-oo*I) is oo assert cos(zoo) is nan assert cos(0) == 1 assert cos(acos(x)) == x assert cos(atan(x)) == 1 / sqrt(1 + x**2) assert cos(asin(x)) == sqrt(1 - x**2) assert cos(acot(x)) == 1 / sqrt(1 + 1 / x**2) assert cos(acsc(x)) == sqrt(1 - 1 / x**2) assert cos(asec(x)) == 1 / x assert cos(atan2(y, x)) == x / sqrt(x**2 + y**2) assert cos(pi*I) == cosh(pi) assert cos(-pi*I) == cosh(pi) assert cos(-2*I) == cosh(2) assert cos(pi/2) == 0 assert cos(-pi/2) == 0 assert cos(pi/2) == 0 assert cos(-pi/2) == 0 assert cos((-3*10**73 + 1)*pi/2) == 0 assert cos((7*10**103 + 1)*pi/2) == 0 n = symbols('n', integer=True, even=False) e = symbols('e', even=True) assert cos(pi*n/2) == 0 assert cos(pi*e/2) == (-1)**(e/2) assert cos(pi) == -1 assert cos(-pi) == -1 assert cos(2*pi) == 1 assert cos(5*pi) == -1 assert cos(8*pi) == 1 assert cos(pi/3) == S.Half assert cos(pi*Rational(-2, 3)) == Rational(-1, 2) assert cos(pi/4) == S.Half*sqrt(2) assert cos(-pi/4) == S.Half*sqrt(2) assert cos(pi*Rational(11, 4)) == Rational(-1, 2)*sqrt(2) assert cos(pi*Rational(-3, 4)) == Rational(-1, 2)*sqrt(2) assert cos(pi/6) == S.Half*sqrt(3) assert cos(-pi/6) == S.Half*sqrt(3) assert cos(pi*Rational(7, 6)) == Rational(-1, 2)*sqrt(3) assert cos(pi*Rational(-5, 6)) == Rational(-1, 2)*sqrt(3) assert cos(pi*Rational(1, 5)) == (sqrt(5) + 1)/4 assert cos(pi*Rational(2, 5)) == (sqrt(5) - 1)/4 assert cos(pi*Rational(3, 5)) == -cos(pi*Rational(2, 5)) assert cos(pi*Rational(4, 5)) == -cos(pi*Rational(1, 5)) assert cos(pi*Rational(6, 5)) == -cos(pi*Rational(1, 5)) assert cos(pi*Rational(8, 5)) == cos(pi*Rational(2, 5)) assert cos(pi*Rational(-1273, 5)) == -cos(pi*Rational(2, 5)) 
assert cos(pi/8) == sqrt((2 + sqrt(2))/4) assert cos(pi/12) == sqrt(2)/4 + sqrt(6)/4 assert cos(pi*Rational(5, 12)) == -sqrt(2)/4 + sqrt(6)/4 assert cos(pi*Rational(7, 12)) == sqrt(2)/4 - sqrt(6)/4 assert cos(pi*Rational(11, 12)) == -sqrt(2)/4 - sqrt(6)/4 assert cos(pi*Rational(104, 105)) == -cos(pi/105) assert cos(pi*Rational(106, 105)) == -cos(pi/105) assert cos(pi*Rational(-104, 105)) == -cos(pi/105) assert cos(pi*Rational(-106, 105)) == -cos(pi/105) assert cos(x*I) == cosh(x) assert cos(k*pi*I) == cosh(k*pi) assert cos(r).is_real is True assert cos(0, evaluate=False).is_algebraic assert cos(a).is_algebraic is None assert cos(na).is_algebraic is False q = Symbol('q', rational=True) assert cos(pi*q).is_algebraic assert cos(pi*Rational(2, 7)).is_algebraic assert cos(k*pi) == (-1)**k assert cos(2*k*pi) == 1 for d in list(range(1, 22)) + [60, 85]: for n in range(0, 2*d + 1): x = n*pi/d e = abs( float(cos(x)) - cos(float(x)) ) assert e < 1e-12 def test_issue_6190(): c = Float('123456789012345678901234567890.25', '') for cls in [sin, cos, tan, cot]: assert cls(c*pi) == cls(pi/4) assert cls(4.125*pi) == cls(pi/8) assert cls(4.7*pi) == cls((4.7 % 2)*pi) def test_cos_series(): assert cos(x).series(x, 0, 9) == \ 1 - x**2/2 + x**4/24 - x**6/720 + x**8/40320 + O(x**9) def test_cos_rewrite(): assert cos(x).rewrite(exp) == exp(I*x)/2 + exp(-I*x)/2 assert cos(x).rewrite(tan) == (1 - tan(x/2)**2)/(1 + tan(x/2)**2) assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2) assert cos(sinh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sinh(3)).n() assert cos(cosh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cosh(3)).n() assert cos(tanh(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tanh(3)).n() assert cos(coth(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, coth(3)).n() assert cos(sin(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, sin(3)).n() assert cos(cos(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cos(3)).n() assert cos(tan(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, tan(3)).n() assert cos(cot(x)).rewrite( exp).subs(x, 3).n() == cos(x).rewrite(exp).subs(x, cot(3)).n() assert cos(log(x)).rewrite(Pow) == x**I/2 + x**-I/2 assert cos(x).rewrite(sec) == 1/sec(x) assert cos(x).rewrite(sin) == sin(x + pi/2, evaluate=False) assert cos(x).rewrite(csc) == 1/csc(-x + pi/2, evaluate=False) assert cos(sin(x)).rewrite(Pow) == cos(sin(x)) def test_cos_expansion(): assert cos(x + y).expand(trig=True) == cos(x)*cos(y) - sin(x)*sin(y) assert cos(x - y).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y) assert cos(y - x).expand(trig=True) == cos(x)*cos(y) + sin(x)*sin(y) assert cos(2*x).expand(trig=True) == 2*cos(x)**2 - 1 assert cos(3*x).expand(trig=True) == 4*cos(x)**3 - 3*cos(x) assert cos(4*x).expand(trig=True) == 8*cos(x)**4 - 8*cos(x)**2 + 1 assert cos(2).expand(trig=True) == 2*cos(1)**2 - 1 assert cos(3).expand(trig=True) == 4*cos(1)**3 - 3*cos(1) def test_cos_AccumBounds(): assert cos(AccumBounds(-oo, oo)) == AccumBounds(-1, 1) assert cos(AccumBounds(0, oo)) == AccumBounds(-1, 1) assert cos(AccumBounds(-oo, 0)) == AccumBounds(-1, 1) assert cos(AccumBounds(0, 2*S.Pi)) == AccumBounds(-1, 1) assert cos(AccumBounds(-S.Pi/3, S.Pi/4)) == AccumBounds(cos(-S.Pi/3), 1) assert cos(AccumBounds(S.Pi*Rational(3, 4), S.Pi*Rational(5, 4))) == AccumBounds(-1, cos(S.Pi*Rational(3, 4))) assert cos(AccumBounds(S.Pi*Rational(5, 4), S.Pi*Rational(4, 3))) == AccumBounds(cos(S.Pi*Rational(5, 4)), 
cos(S.Pi*Rational(4, 3))) assert cos(AccumBounds(S.Pi/4, S.Pi/3)) == AccumBounds(cos(S.Pi/3), cos(S.Pi/4)) def test_cos_fdiff(): assert cos(x).fdiff() == -sin(x) raises(ArgumentIndexError, lambda: cos(x).fdiff(2)) def test_tan(): assert tan(nan) is nan assert tan(zoo) is nan assert tan(oo) == AccumBounds(-oo, oo) assert tan(oo) - tan(oo) == AccumBounds(-oo, oo) assert tan.nargs == FiniteSet(1) assert tan(oo*I) == I assert tan(-oo*I) == -I assert tan(0) == 0 assert tan(atan(x)) == x assert tan(asin(x)) == x / sqrt(1 - x**2) assert tan(acos(x)) == sqrt(1 - x**2) / x assert tan(acot(x)) == 1 / x assert tan(acsc(x)) == 1 / (sqrt(1 - 1 / x**2) * x) assert tan(asec(x)) == sqrt(1 - 1 / x**2) * x assert tan(atan2(y, x)) == y/x assert tan(pi*I) == tanh(pi)*I assert tan(-pi*I) == -tanh(pi)*I assert tan(-2*I) == -tanh(2)*I assert tan(pi) == 0 assert tan(-pi) == 0 assert tan(2*pi) == 0 assert tan(-2*pi) == 0 assert tan(-3*10**73*pi) == 0 assert tan(pi/2) is zoo assert tan(pi*Rational(3, 2)) is zoo assert tan(pi/3) == sqrt(3) assert tan(pi*Rational(-2, 3)) == sqrt(3) assert tan(pi/4) is S.One assert tan(-pi/4) is S.NegativeOne assert tan(pi*Rational(17, 4)) is S.One assert tan(pi*Rational(-3, 4)) is S.One assert tan(pi/5) == sqrt(5 - 2*sqrt(5)) assert tan(pi*Rational(2, 5)) == sqrt(5 + 2*sqrt(5)) assert tan(pi*Rational(18, 5)) == -sqrt(5 + 2*sqrt(5)) assert tan(pi*Rational(-16, 5)) == -sqrt(5 - 2*sqrt(5)) assert tan(pi/6) == 1/sqrt(3) assert tan(-pi/6) == -1/sqrt(3) assert tan(pi*Rational(7, 6)) == 1/sqrt(3) assert tan(pi*Rational(-5, 6)) == 1/sqrt(3) assert tan(pi/8) == -1 + sqrt(2) assert tan(pi*Rational(3, 8)) == 1 + sqrt(2) assert tan(pi*Rational(5, 8)) == -1 - sqrt(2) assert tan(pi*Rational(7, 8)) == 1 - sqrt(2) assert tan(pi/10) == sqrt(1 - 2*sqrt(5)/5) assert tan(pi*Rational(3, 10)) == sqrt(1 + 2*sqrt(5)/5) assert tan(pi*Rational(17, 10)) == -sqrt(1 + 2*sqrt(5)/5) assert tan(pi*Rational(-31, 10)) == -sqrt(1 - 2*sqrt(5)/5) assert tan(pi/12) == -sqrt(3) + 2 assert tan(pi*Rational(5, 12)) == sqrt(3) + 2 assert tan(pi*Rational(7, 12)) == -sqrt(3) - 2 assert tan(pi*Rational(11, 12)) == sqrt(3) - 2 assert tan(pi/24).radsimp() == -2 - sqrt(3) + sqrt(2) + sqrt(6) assert tan(pi*Rational(5, 24)).radsimp() == -2 + sqrt(3) - sqrt(2) + sqrt(6) assert tan(pi*Rational(7, 24)).radsimp() == 2 - sqrt(3) - sqrt(2) + sqrt(6) assert tan(pi*Rational(11, 24)).radsimp() == 2 + sqrt(3) + sqrt(2) + sqrt(6) assert tan(pi*Rational(13, 24)).radsimp() == -2 - sqrt(3) - sqrt(2) - sqrt(6) assert tan(pi*Rational(17, 24)).radsimp() == -2 + sqrt(3) + sqrt(2) - sqrt(6) assert tan(pi*Rational(19, 24)).radsimp() == 2 - sqrt(3) + sqrt(2) - sqrt(6) assert tan(pi*Rational(23, 24)).radsimp() == 2 + sqrt(3) - sqrt(2) - sqrt(6) assert tan(x*I) == tanh(x)*I assert tan(k*pi) == 0 assert tan(17*k*pi) == 0 assert tan(k*pi*I) == tanh(k*pi)*I assert tan(r).is_real is None assert tan(r).is_extended_real is True assert tan(0, evaluate=False).is_algebraic assert tan(a).is_algebraic is None assert tan(na).is_algebraic is False assert tan(pi*Rational(10, 7)) == tan(pi*Rational(3, 7)) assert tan(pi*Rational(11, 7)) == -tan(pi*Rational(3, 7)) assert tan(pi*Rational(-11, 7)) == tan(pi*Rational(3, 7)) assert tan(pi*Rational(15, 14)) == tan(pi/14) assert tan(pi*Rational(-15, 14)) == -tan(pi/14) assert tan(r).is_finite is None assert tan(I*r).is_finite is True def test_tan_series(): assert tan(x).series(x, 0, 9) == \ x + x**3/3 + 2*x**5/15 + 17*x**7/315 + O(x**9) def test_tan_rewrite(): neg_exp, pos_exp = exp(-x*I), exp(x*I) assert tan(x).rewrite(exp) == 
I*(neg_exp - pos_exp)/(neg_exp + pos_exp) assert tan(x).rewrite(sin) == 2*sin(x)**2/sin(2*x) assert tan(x).rewrite(cos) == cos(x - S.Pi/2, evaluate=False)/cos(x) assert tan(x).rewrite(cot) == 1/cot(x) assert tan(sinh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sinh(3)).n() assert tan(cosh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cosh(3)).n() assert tan(tanh(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tanh(3)).n() assert tan(coth(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, coth(3)).n() assert tan(sin(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, sin(3)).n() assert tan(cos(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cos(3)).n() assert tan(tan(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, tan(3)).n() assert tan(cot(x)).rewrite( exp).subs(x, 3).n() == tan(x).rewrite(exp).subs(x, cot(3)).n() assert tan(log(x)).rewrite(Pow) == I*(x**-I - x**I)/(x**-I + x**I) assert 0 == (cos(pi/34)*tan(pi/34) - sin(pi/34)).rewrite(pow) assert 0 == (cos(pi/17)*tan(pi/17) - sin(pi/17)).rewrite(pow) assert tan(pi/19).rewrite(pow) == tan(pi/19) assert tan(pi*Rational(8, 19)).rewrite(sqrt) == tan(pi*Rational(8, 19)) assert tan(x).rewrite(sec) == sec(x)/sec(x - pi/2, evaluate=False) assert tan(x).rewrite(csc) == csc(-x + pi/2, evaluate=False)/csc(x) assert tan(sin(x)).rewrite(Pow) == tan(sin(x)) assert tan(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == sqrt(sqrt(5)/8 + Rational(5, 8))/(Rational(-1, 4) + sqrt(5)/4) def test_tan_subs(): assert tan(x).subs(tan(x), y) == y assert tan(x).subs(x, y) == tan(y) assert tan(x).subs(x, S.Pi/2) is zoo assert tan(x).subs(x, S.Pi*Rational(3, 2)) is zoo def test_tan_expansion(): assert tan(x + y).expand(trig=True) == ((tan(x) + tan(y))/(1 - tan(x)*tan(y))).expand() assert tan(x - y).expand(trig=True) == ((tan(x) - tan(y))/(1 + tan(x)*tan(y))).expand() assert tan(x + y + z).expand(trig=True) == ( (tan(x) + tan(y) + tan(z) - tan(x)*tan(y)*tan(z))/ (1 - tan(x)*tan(y) - tan(x)*tan(z) - tan(y)*tan(z))).expand() assert 0 == tan(2*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 7))])*24 - 7 assert 0 == tan(3*x).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*55 - 37 assert 0 == tan(4*x - pi/4).expand(trig=True).rewrite(tan).subs([(tan(x), Rational(1, 5))])*239 - 1 def test_tan_AccumBounds(): assert tan(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo) assert tan(AccumBounds(S.Pi/3, S.Pi*Rational(2, 3))) == AccumBounds(-oo, oo) assert tan(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(tan(S.Pi/6), tan(S.Pi/3)) def test_tan_fdiff(): assert tan(x).fdiff() == tan(x)**2 + 1 raises(ArgumentIndexError, lambda: tan(x).fdiff(2)) def test_cot(): assert cot(nan) is nan assert cot.nargs == FiniteSet(1) assert cot(oo*I) == -I assert cot(-oo*I) == I assert cot(zoo) is nan assert cot(0) is zoo assert cot(2*pi) is zoo assert cot(acot(x)) == x assert cot(atan(x)) == 1 / x assert cot(asin(x)) == sqrt(1 - x**2) / x assert cot(acos(x)) == x / sqrt(1 - x**2) assert cot(acsc(x)) == sqrt(1 - 1 / x**2) * x assert cot(asec(x)) == 1 / (sqrt(1 - 1 / x**2) * x) assert cot(atan2(y, x)) == x/y assert cot(pi*I) == -coth(pi)*I assert cot(-pi*I) == coth(pi)*I assert cot(-2*I) == coth(2)*I assert cot(pi) == cot(2*pi) == cot(3*pi) assert cot(-pi) == cot(-2*pi) == cot(-3*pi) assert cot(pi/2) == 0 assert cot(-pi/2) == 0 assert cot(pi*Rational(5, 2)) == 0 assert cot(pi*Rational(7, 2)) == 0 assert cot(pi/3) == 1/sqrt(3) assert cot(pi*Rational(-2, 3)) == 1/sqrt(3) 
assert cot(pi/4) is S.One assert cot(-pi/4) is S.NegativeOne assert cot(pi*Rational(17, 4)) is S.One assert cot(pi*Rational(-3, 4)) is S.One assert cot(pi/6) == sqrt(3) assert cot(-pi/6) == -sqrt(3) assert cot(pi*Rational(7, 6)) == sqrt(3) assert cot(pi*Rational(-5, 6)) == sqrt(3) assert cot(pi/8) == 1 + sqrt(2) assert cot(pi*Rational(3, 8)) == -1 + sqrt(2) assert cot(pi*Rational(5, 8)) == 1 - sqrt(2) assert cot(pi*Rational(7, 8)) == -1 - sqrt(2) assert cot(pi/12) == sqrt(3) + 2 assert cot(pi*Rational(5, 12)) == -sqrt(3) + 2 assert cot(pi*Rational(7, 12)) == sqrt(3) - 2 assert cot(pi*Rational(11, 12)) == -sqrt(3) - 2 assert cot(pi/24).radsimp() == sqrt(2) + sqrt(3) + 2 + sqrt(6) assert cot(pi*Rational(5, 24)).radsimp() == -sqrt(2) - sqrt(3) + 2 + sqrt(6) assert cot(pi*Rational(7, 24)).radsimp() == -sqrt(2) + sqrt(3) - 2 + sqrt(6) assert cot(pi*Rational(11, 24)).radsimp() == sqrt(2) - sqrt(3) - 2 + sqrt(6) assert cot(pi*Rational(13, 24)).radsimp() == -sqrt(2) + sqrt(3) + 2 - sqrt(6) assert cot(pi*Rational(17, 24)).radsimp() == sqrt(2) - sqrt(3) + 2 - sqrt(6) assert cot(pi*Rational(19, 24)).radsimp() == sqrt(2) + sqrt(3) - 2 - sqrt(6) assert cot(pi*Rational(23, 24)).radsimp() == -sqrt(2) - sqrt(3) - 2 - sqrt(6) assert cot(x*I) == -coth(x)*I assert cot(k*pi*I) == -coth(k*pi)*I assert cot(r).is_real is None assert cot(r).is_extended_real is True assert cot(a).is_algebraic is None assert cot(na).is_algebraic is False assert cot(pi*Rational(10, 7)) == cot(pi*Rational(3, 7)) assert cot(pi*Rational(11, 7)) == -cot(pi*Rational(3, 7)) assert cot(pi*Rational(-11, 7)) == cot(pi*Rational(3, 7)) assert cot(pi*Rational(39, 34)) == cot(pi*Rational(5, 34)) assert cot(pi*Rational(-41, 34)) == -cot(pi*Rational(7, 34)) assert cot(x).is_finite is None assert cot(r).is_finite is None i = Symbol('i', imaginary=True) assert cot(i).is_finite is True assert cot(x).subs(x, 3*pi) is zoo def test_tan_cot_sin_cos_evalf(): assert abs((tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15)) - 1).evalf()) < 1e-14 assert abs((cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15)) - 1).evalf()) < 1e-14 @XFAIL def test_tan_cot_sin_cos_ratsimp(): assert 1 == (tan(pi*Rational(8, 15))*cos(pi*Rational(8, 15))/sin(pi*Rational(8, 15))).ratsimp() assert 1 == (cot(pi*Rational(4, 15))*sin(pi*Rational(4, 15))/cos(pi*Rational(4, 15))).ratsimp() def test_cot_series(): assert cot(x).series(x, 0, 9) == \ 1/x - x/3 - x**3/45 - 2*x**5/945 - x**7/4725 + O(x**9) assert cot(x**4 + x**5).series(x, 0, 1) == \ x**(-4) - 1/x**3 + x**(-2) - 1/x + 1 + O(x) assert cot(pi*(1-x)).series(x, 0, 3) == -1/(pi*x) + pi*x/3 + O(x**3) assert cot(x).taylor_term(0, x) == 1/x assert cot(x).taylor_term(2, x) is S.Zero assert cot(x).taylor_term(3, x) == -x**3/45 def test_cot_rewrite(): neg_exp, pos_exp = exp(-x*I), exp(x*I) assert cot(x).rewrite(exp) == I*(pos_exp + neg_exp)/(pos_exp - neg_exp) assert cot(x).rewrite(sin) == sin(2*x)/(2*(sin(x)**2)) assert cot(x).rewrite(cos) == cos(x)/cos(x - pi/2, evaluate=False) assert cot(x).rewrite(tan) == 1/tan(x) assert cot(sinh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sinh(3)).n() assert cot(cosh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, cosh(3)).n() assert cot(tanh(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tanh(3)).n() assert cot(coth(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, coth(3)).n() assert cot(sin(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, sin(3)).n() assert 
cot(tan(x)).rewrite( exp).subs(x, 3).n() == cot(x).rewrite(exp).subs(x, tan(3)).n() assert cot(log(x)).rewrite(Pow) == -I*(x**-I + x**I)/(x**-I - x**I) assert cot(pi*Rational(4, 34)).rewrite(pow).ratsimp() == (cos(pi*Rational(4, 34))/sin(pi*Rational(4, 34))).rewrite(pow).ratsimp() assert cot(pi*Rational(4, 17)).rewrite(pow) == (cos(pi*Rational(4, 17))/sin(pi*Rational(4, 17))).rewrite(pow) assert cot(pi/19).rewrite(pow) == cot(pi/19) assert cot(pi/19).rewrite(sqrt) == cot(pi/19) assert cot(x).rewrite(sec) == sec(x - pi / 2, evaluate=False) / sec(x) assert cot(x).rewrite(csc) == csc(x) / csc(- x + pi / 2, evaluate=False) assert cot(sin(x)).rewrite(Pow) == cot(sin(x)) assert cot(pi*Rational(2, 5), evaluate=False).rewrite(sqrt) == (Rational(-1, 4) + sqrt(5)/4)/\ sqrt(sqrt(5)/8 + Rational(5, 8)) def test_cot_subs(): assert cot(x).subs(cot(x), y) == y assert cot(x).subs(x, y) == cot(y) assert cot(x).subs(x, 0) is zoo assert cot(x).subs(x, S.Pi) is zoo def test_cot_expansion(): assert cot(x + y).expand(trig=True) == ((cot(x)*cot(y) - 1)/(cot(x) + cot(y))).expand() assert cot(x - y).expand(trig=True) == (-(cot(x)*cot(y) + 1)/(cot(x) - cot(y))).expand() assert cot(x + y + z).expand(trig=True) == ( (cot(x)*cot(y)*cot(z) - cot(x) - cot(y) - cot(z))/ (-1 + cot(x)*cot(y) + cot(x)*cot(z) + cot(y)*cot(z))).expand() assert cot(3*x).expand(trig=True) == ((cot(x)**3 - 3*cot(x))/(3*cot(x)**2 - 1)).expand() assert 0 == cot(2*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 3))])*3 + 4 assert 0 == cot(3*x).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 5))])*55 - 37 assert 0 == cot(4*x - pi/4).expand(trig=True).rewrite(cot).subs([(cot(x), Rational(1, 7))])*863 + 191 def test_cot_AccumBounds(): assert cot(AccumBounds(-oo, oo)) == AccumBounds(-oo, oo) assert cot(AccumBounds(-S.Pi/3, S.Pi/3)) == AccumBounds(-oo, oo) assert cot(AccumBounds(S.Pi/6, S.Pi/3)) == AccumBounds(cot(S.Pi/3), cot(S.Pi/6)) def test_cot_fdiff(): assert cot(x).fdiff() == -cot(x)**2 - 1 raises(ArgumentIndexError, lambda: cot(x).fdiff(2)) def test_sinc(): assert isinstance(sinc(x), sinc) s = Symbol('s', zero=True) assert sinc(s) is S.One assert sinc(S.Infinity) is S.Zero assert sinc(S.NegativeInfinity) is S.Zero assert sinc(S.NaN) is S.NaN assert sinc(S.ComplexInfinity) is S.NaN n = Symbol('n', integer=True, nonzero=True) assert sinc(n*pi) is S.Zero assert sinc(-n*pi) is S.Zero assert sinc(pi/2) == 2 / pi assert sinc(-pi/2) == 2 / pi assert sinc(pi*Rational(5, 2)) == 2 / (5*pi) assert sinc(pi*Rational(7, 2)) == -2 / (7*pi) assert sinc(-x) == sinc(x) assert sinc(x).diff() == Piecewise(((x*cos(x) - sin(x)) / x**2, Ne(x, 0)), (0, True)) assert sinc(x).diff(x).equals(sinc(x).rewrite(sin).diff(x)) assert sinc(x).diff().subs(x, 0) is S.Zero assert sinc(x).series() == 1 - x**2/6 + x**4/120 + O(x**6) assert sinc(x).rewrite(jn) == jn(0, x) assert sinc(x).rewrite(sin) == Piecewise((sin(x)/x, Ne(x, 0)), (1, True)) def test_asin(): assert asin(nan) is nan assert asin.nargs == FiniteSet(1) assert asin(oo) == -I*oo assert asin(-oo) == I*oo assert asin(zoo) is zoo assert asin(0) == 0 assert asin(1) == pi/2 assert asin(-1) == -pi/2 assert asin(sqrt(3)/2) == pi/3 assert asin(-sqrt(3)/2) == -pi/3 assert asin(sqrt(2)/2) == pi/4 assert asin(-sqrt(2)/2) == -pi/4 assert asin(sqrt((5 - sqrt(5))/8)) == pi/5 assert asin(-sqrt((5 - sqrt(5))/8)) == -pi/5 assert asin(S.Half) == pi/6 assert asin(Rational(-1, 2)) == -pi/6 assert asin((sqrt(2 - sqrt(2)))/2) == pi/8 assert asin(-(sqrt(2 - sqrt(2)))/2) == -pi/8 assert asin((sqrt(5) - 1)/4) == pi/10 
assert asin(-(sqrt(5) - 1)/4) == -pi/10 assert asin((sqrt(3) - 1)/sqrt(2**3)) == pi/12 assert asin(-(sqrt(3) - 1)/sqrt(2**3)) == -pi/12 for d in [5, 6, 8, 10, 12]: for n in range(-(d//2), d//2 + 1): if gcd(n, d) == 1: assert asin(sin(n*pi/d)) == n*pi/d assert asin(x).diff(x) == 1/sqrt(1 - x**2) assert asin(0.2).is_real is True assert asin(-2).is_real is False assert asin(r).is_real is None assert asin(-2*I) == -I*asinh(2) assert asin(Rational(1, 7), evaluate=False).is_positive is True assert asin(Rational(-1, 7), evaluate=False).is_positive is False assert asin(p).is_positive is None assert asin(sin(Rational(7, 2))) == Rational(-7, 2) + pi assert asin(sin(Rational(-7, 4))) == Rational(7, 4) - pi assert unchanged(asin, cos(x)) def test_asin_series(): assert asin(x).series(x, 0, 9) == \ x + x**3/6 + 3*x**5/40 + 5*x**7/112 + O(x**9) t5 = asin(x).taylor_term(5, x) assert t5 == 3*x**5/40 assert asin(x).taylor_term(7, x, t5, 0) == 5*x**7/112 def test_asin_rewrite(): assert asin(x).rewrite(log) == -I*log(I*x + sqrt(1 - x**2)) assert asin(x).rewrite(atan) == 2*atan(x/(1 + sqrt(1 - x**2))) assert asin(x).rewrite(acos) == S.Pi/2 - acos(x) assert asin(x).rewrite(acot) == 2*acot((sqrt(-x**2 + 1) + 1)/x) assert asin(x).rewrite(asec) == -asec(1/x) + pi/2 assert asin(x).rewrite(acsc) == acsc(1/x) def test_asin_fdiff(): assert asin(x).fdiff() == 1/sqrt(1 - x**2) raises(ArgumentIndexError, lambda: asin(x).fdiff(2)) def test_acos(): assert acos(nan) is nan assert acos(zoo) is zoo assert acos.nargs == FiniteSet(1) assert acos(oo) == I*oo assert acos(-oo) == -I*oo assert acos(0) == pi/2 assert acos(S.Half) == pi/3 assert acos(Rational(-1, 2)) == pi*Rational(2, 3) assert acos(1) == 0 assert acos(-1) == pi assert acos(sqrt(2)/2) == pi/4 assert acos(-sqrt(2)/2) == pi*Rational(3, 4) for d in [5, 6, 8, 10, 12]: for num in range(d): if gcd(num, d) == 1: assert acos(cos(num*pi/d)) == num*pi/d assert acos(2*I) == pi/2 - asin(2*I) assert acos(x).diff(x) == -1/sqrt(1 - x**2) assert acos(0.2).is_real is True assert acos(-2).is_real is False assert acos(r).is_real is None assert acos(Rational(1, 7), evaluate=False).is_positive is True assert acos(Rational(-1, 7), evaluate=False).is_positive is True assert acos(Rational(3, 2), evaluate=False).is_positive is False assert acos(p).is_positive is None assert acos(2 + p).conjugate() != acos(10 + p) assert acos(-3 + n).conjugate() != acos(-3 + n) assert acos(Rational(1, 3)).conjugate() == acos(Rational(1, 3)) assert acos(Rational(-1, 3)).conjugate() == acos(Rational(-1, 3)) assert acos(p + n*I).conjugate() == acos(p - n*I) assert acos(z).conjugate() != acos(conjugate(z)) def test_acos_series(): assert acos(x).series(x, 0, 8) == \ pi/2 - x - x**3/6 - 3*x**5/40 - 5*x**7/112 + O(x**8) assert acos(x).series(x, 0, 8) == pi/2 - asin(x).series(x, 0, 8) t5 = acos(x).taylor_term(5, x) assert t5 == -3*x**5/40 assert acos(x).taylor_term(7, x, t5, 0) == -5*x**7/112 assert acos(x).taylor_term(0, x) == pi/2 assert acos(x).taylor_term(2, x) is S.Zero def test_acos_rewrite(): assert acos(x).rewrite(log) == pi/2 + I*log(I*x + sqrt(1 - x**2)) assert acos(x).rewrite(atan) == \ atan(sqrt(1 - x**2)/x) + (pi/2)*(1 - x*sqrt(1/x**2)) assert acos(0).rewrite(atan) == S.Pi/2 assert acos(0.5).rewrite(atan) == acos(0.5).rewrite(log) assert acos(x).rewrite(asin) == S.Pi/2 - asin(x) assert acos(x).rewrite(acot) == -2*acot((sqrt(-x**2 + 1) + 1)/x) + pi/2 assert acos(x).rewrite(asec) == asec(1/x) assert acos(x).rewrite(acsc) == -acsc(1/x) + pi/2 def test_acos_fdiff(): assert acos(x).fdiff() == -1/sqrt(1 - 
x**2) raises(ArgumentIndexError, lambda: acos(x).fdiff(2)) def test_atan(): assert atan(nan) is nan assert atan.nargs == FiniteSet(1) assert atan(oo) == pi/2 assert atan(-oo) == -pi/2 assert atan(zoo) == AccumBounds(-pi/2, pi/2) assert atan(0) == 0 assert atan(1) == pi/4 assert atan(sqrt(3)) == pi/3 assert atan(-(1 + sqrt(2))) == pi*Rational(-3, 8) assert atan(sqrt((5 - 2 * sqrt(5)))) == pi/5 assert atan(-sqrt(1 - 2 * sqrt(5)/ 5)) == -pi/10 assert atan(sqrt(1 + 2 * sqrt(5) / 5)) == pi*Rational(3, 10) assert atan(-2 + sqrt(3)) == -pi/12 assert atan(2 + sqrt(3)) == pi*Rational(5, 12) assert atan(-2 - sqrt(3)) == pi*Rational(-5, 12) for d in [5, 6, 8, 10, 12]: for num in range(-(d//2), d//2 + 1): if gcd(num, d) == 1: assert atan(tan(num*pi/d)) == num*pi/d assert atan(oo) == pi/2 assert atan(x).diff(x) == 1/(1 + x**2) assert atan(r).is_real is True assert atan(-2*I) == -I*atanh(2) assert unchanged(atan, cot(x)) assert atan(cot(Rational(1, 4))) == Rational(-1, 4) + pi/2 assert acot(Rational(1, 4)).is_rational is False for s in (x, p, n, np, nn, nz, ep, en, enp, enn, enz): if s.is_real or s.is_extended_real is None: assert s.is_nonzero is atan(s).is_nonzero assert s.is_positive is atan(s).is_positive assert s.is_negative is atan(s).is_negative assert s.is_nonpositive is atan(s).is_nonpositive assert s.is_nonnegative is atan(s).is_nonnegative else: assert s.is_extended_nonzero is atan(s).is_nonzero assert s.is_extended_positive is atan(s).is_positive assert s.is_extended_negative is atan(s).is_negative assert s.is_extended_nonpositive is atan(s).is_nonpositive assert s.is_extended_nonnegative is atan(s).is_nonnegative assert s.is_extended_nonzero is atan(s).is_extended_nonzero assert s.is_extended_positive is atan(s).is_extended_positive assert s.is_extended_negative is atan(s).is_extended_negative assert s.is_extended_nonpositive is atan(s).is_extended_nonpositive assert s.is_extended_nonnegative is atan(s).is_extended_nonnegative def test_atan_rewrite(): assert atan(x).rewrite(log) == I*(log(1 - I*x)-log(1 + I*x))/2 assert atan(x).rewrite(asin) == (-asin(1/sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x assert atan(x).rewrite(acos) == sqrt(x**2)*acos(1/sqrt(x**2 + 1))/x assert atan(x).rewrite(acot) == acot(1/x) assert atan(x).rewrite(asec) == sqrt(x**2)*asec(sqrt(x**2 + 1))/x assert atan(x).rewrite(acsc) == (-acsc(sqrt(x**2 + 1)) + pi/2)*sqrt(x**2)/x assert atan(-5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:-5*I}) assert atan(5*I).evalf() == atan(x).rewrite(log).evalf(subs={x:5*I}) def test_atan_fdiff(): assert atan(x).fdiff() == 1/(x**2 + 1) raises(ArgumentIndexError, lambda: atan(x).fdiff(2)) def test_atan2(): assert atan2.nargs == FiniteSet(2) assert atan2(0, 0) is S.NaN assert atan2(0, 1) == 0 assert atan2(1, 1) == pi/4 assert atan2(1, 0) == pi/2 assert atan2(1, -1) == pi*Rational(3, 4) assert atan2(0, -1) == pi assert atan2(-1, -1) == pi*Rational(-3, 4) assert atan2(-1, 0) == -pi/2 assert atan2(-1, 1) == -pi/4 i = symbols('i', imaginary=True) r = symbols('r', real=True) eq = atan2(r, i) ans = -I*log((i + I*r)/sqrt(i**2 + r**2)) reps = ((r, 2), (i, I)) assert eq.subs(reps) == ans.subs(reps) x = Symbol('x', negative=True) y = Symbol('y', negative=True) assert atan2(y, x) == atan(y/x) - pi y = Symbol('y', nonnegative=True) assert atan2(y, x) == atan(y/x) + pi y = Symbol('y') assert atan2(y, x) == atan2(y, x, evaluate=False) u = Symbol("u", positive=True) assert atan2(0, u) == 0 u = Symbol("u", negative=True) assert atan2(0, u) == pi assert atan2(y, oo) == 0 assert atan2(y, -oo)== 
2*pi*Heaviside(re(y)) - pi assert atan2(y, x).rewrite(log) == -I*log((x + I*y)/sqrt(x**2 + y**2)) assert atan2(0, 0) is S.NaN ex = atan2(y, x) - arg(x + I*y) assert ex.subs({x:2, y:3}).rewrite(arg) == 0 assert ex.subs({x:2, y:3*I}).rewrite(arg) == -pi - I*log(sqrt(5)*I/5) assert ex.subs({x:2*I, y:3}).rewrite(arg) == -pi/2 - I*log(sqrt(5)*I) assert ex.subs({x:2*I, y:3*I}).rewrite(arg) == -pi + atan(Rational(2, 3)) + atan(Rational(3, 2)) i = symbols('i', imaginary=True) r = symbols('r', real=True) e = atan2(i, r) rewrite = e.rewrite(arg) reps = {i: I, r: -2} assert rewrite == -I*log(abs(I*i + r)/sqrt(abs(i**2 + r**2))) + arg((I*i + r)/sqrt(i**2 + r**2)) assert (e - rewrite).subs(reps).equals(0) assert atan2(0, x).rewrite(atan) == Piecewise((pi, re(x) < 0), (0, Ne(x, 0)), (nan, True)) assert atan2(0, r).rewrite(atan) == Piecewise((pi, r < 0), (0, Ne(r, 0)), (S.NaN, True)) assert atan2(0, i).rewrite(atan) == 0 assert atan2(0, r + i).rewrite(atan) == Piecewise((pi, r < 0), (0, True)) assert atan2(y, x).rewrite(atan) == Piecewise( (2*atan(y/(x + sqrt(x**2 + y**2))), Ne(y, 0)), (pi, re(x) < 0), (0, (re(x) > 0) | Ne(im(x), 0)), (nan, True)) assert conjugate(atan2(x, y)) == atan2(conjugate(x), conjugate(y)) assert diff(atan2(y, x), x) == -y/(x**2 + y**2) assert diff(atan2(y, x), y) == x/(x**2 + y**2) assert simplify(diff(atan2(y, x).rewrite(log), x)) == -y/(x**2 + y**2) assert simplify(diff(atan2(y, x).rewrite(log), y)) == x/(x**2 + y**2) assert str(atan2(1, 2).evalf(5)) == '0.46365' raises(ArgumentIndexError, lambda: atan2(x, y).fdiff(3)) def test_issue_17461(): class A(Symbol): is_extended_real = True def _eval_evalf(self, prec): return Float(5.0) x = A('X') y = A('Y') assert abs(atan2(x, y).evalf() - 0.785398163397448) <= 1e-10 def test_acot(): assert acot(nan) is nan assert acot.nargs == FiniteSet(1) assert acot(-oo) == 0 assert acot(oo) == 0 assert acot(zoo) == 0 assert acot(1) == pi/4 assert acot(0) == pi/2 assert acot(sqrt(3)/3) == pi/3 assert acot(1/sqrt(3)) == pi/3 assert acot(-1/sqrt(3)) == -pi/3 assert acot(x).diff(x) == -1/(1 + x**2) assert acot(r).is_extended_real is True assert acot(I*pi) == -I*acoth(pi) assert acot(-2*I) == I*acoth(2) assert acot(x).is_positive is None assert acot(n).is_positive is False assert acot(p).is_positive is True assert acot(I).is_positive is False assert acot(Rational(1, 4)).is_rational is False assert unchanged(acot, cot(x)) assert unchanged(acot, tan(x)) assert acot(cot(Rational(1, 4))) == Rational(1, 4) assert acot(tan(Rational(-1, 4))) == Rational(1, 4) - pi/2 def test_acot_rewrite(): assert acot(x).rewrite(log) == I*(log(1 - I/x)-log(1 + I/x))/2 assert acot(x).rewrite(asin) == x*(-asin(sqrt(-x**2)/sqrt(-x**2 - 1)) + pi/2)*sqrt(x**(-2)) assert acot(x).rewrite(acos) == x*sqrt(x**(-2))*acos(sqrt(-x**2)/sqrt(-x**2 - 1)) assert acot(x).rewrite(atan) == atan(1/x) assert acot(x).rewrite(asec) == x*sqrt(x**(-2))*asec(sqrt((x**2 + 1)/x**2)) assert acot(x).rewrite(acsc) == x*(-acsc(sqrt((x**2 + 1)/x**2)) + pi/2)*sqrt(x**(-2)) assert acot(-I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:-I/5}) assert acot(I/5).evalf() == acot(x).rewrite(log).evalf(subs={x:I/5}) def test_acot_fdiff(): assert acot(x).fdiff() == -1/(x**2 + 1) raises(ArgumentIndexError, lambda: acot(x).fdiff(2)) def test_attributes(): assert sin(x).args == (x,) def test_sincos_rewrite(): assert sin(pi/2 - x) == cos(x) assert sin(pi - x) == sin(x) assert cos(pi/2 - x) == sin(x) assert cos(pi - x) == -cos(x) def _check_even_rewrite(func, arg): return func(arg).args[0] == -arg def 
_check_odd_rewrite(func, arg): return func(arg).func.is_Mul def _check_no_rewrite(func, arg): return func(arg).args[0] == arg def test_evenodd_rewrite(): a = cos(2) b = sin(1) even = [cos] odd = [sin, tan, cot, asin, atan, acot] with_minus = [-1, -2**1024 * E, -pi/105, -x*y, -x - y] for func in even: for expr in with_minus: assert _check_even_rewrite(func, expr) assert _check_no_rewrite(func, a*b) assert func( x - y) == func(y - x) for func in odd: for expr in with_minus: assert _check_odd_rewrite(func, expr) assert _check_no_rewrite(func, a*b) assert func( x - y) == -func(y - x) # it doesn't matter which form is canonical def test_issue_4547(): assert sin(x).rewrite(cot) == 2*cot(x/2)/(1 + cot(x/2)**2) assert cos(x).rewrite(cot) == -(1 - cot(x/2)**2)/(1 + cot(x/2)**2) assert tan(x).rewrite(cot) == 1/cot(x) assert cot(x).fdiff() == -1 - cot(x)**2 def test_as_leading_term_issue_5272(): assert sin(x).as_leading_term(x) == x assert cos(x).as_leading_term(x) == 1 assert tan(x).as_leading_term(x) == x assert cot(x).as_leading_term(x) == 1/x assert asin(x).as_leading_term(x) == x assert acos(x).as_leading_term(x) == x assert atan(x).as_leading_term(x) == x assert acot(x).as_leading_term(x) == x def test_leading_terms(): for func in [sin, cos, tan, cot, asin, acos, atan, acot]: for arg in (1/x, S.Half): eq = func(arg) assert eq.as_leading_term(x) == eq def test_atan2_expansion(): assert cancel(atan2(x**2, x + 1).diff(x) - atan(x**2/(x + 1)).diff(x)) == 0 assert cancel(atan(y/x).series(y, 0, 5) - atan2(y, x).series(y, 0, 5) + atan2(0, x) - atan(0)) == O(y**5) assert cancel(atan(y/x).series(x, 1, 4) - atan2(y, x).series(x, 1, 4) + atan2(y, 1) - atan(y)) == O((x - 1)**4, (x, 1)) assert cancel(atan((y + x)/x).series(x, 1, 3) - atan2(y + x, x).series(x, 1, 3) + atan2(1 + y, 1) - atan(1 + y)) == O((x - 1)**3, (x, 1)) assert Matrix([atan2(y, x)]).jacobian([y, x]) == \ Matrix([[x/(y**2 + x**2), -y/(y**2 + x**2)]]) def test_aseries(): def t(n, v, d, e): assert abs( n(1/v).evalf() - n(1/x).series(x, dir=d).removeO().subs(x, v)) < e t(atan, 0.1, '+', 1e-5) t(atan, -0.1, '-', 1e-5) t(acot, 0.1, '+', 1e-5) t(acot, -0.1, '-', 1e-5) def test_issue_4420(): i = Symbol('i', integer=True) e = Symbol('e', even=True) o = Symbol('o', odd=True) assert cos(4*i*pi) == 1 assert sin(4*i*pi) == 0 assert tan(4*i*pi) == 0 assert cot(4*i*pi) is zoo assert cos(3*i*pi) == cos(pi*i) assert sin(3*i*pi) == 0 assert tan(3*i*pi) == 0 assert cot(3*i*pi) is zoo assert cos(4.0*i*pi) == 1 assert sin(4.0*i*pi) == 0 assert tan(4.0*i*pi) == 0 assert cot(4.0*i*pi) is zoo assert cos(3.0*i*pi) == cos(pi*i) assert sin(3.0*i*pi) == 0 assert tan(3.0*i*pi) == 0 assert cot(3.0*i*pi) is zoo assert cos(4.5*i*pi) == cos(0.5*pi*i) assert sin(4.5*i*pi) == sin(0.5*pi*i) assert tan(4.5*i*pi) == tan(0.5*pi*i) assert cot(4.5*i*pi) == cot(0.5*pi*i) assert cos(4*e*pi) == 1 assert sin(4*e*pi) == 0 assert tan(4*e*pi) == 0 assert cot(4*e*pi) is zoo assert cos(3*e*pi) == 1 assert sin(3*e*pi) == 0 assert tan(3*e*pi) == 0 assert cot(3*e*pi) is zoo assert cos(4.0*e*pi) == 1 assert sin(4.0*e*pi) == 0 assert tan(4.0*e*pi) == 0 assert cot(4.0*e*pi) is zoo assert cos(3.0*e*pi) == 1 assert sin(3.0*e*pi) == 0 assert tan(3.0*e*pi) == 0 assert cot(3.0*e*pi) is zoo assert cos(4.5*e*pi) == cos(0.5*pi*e) assert sin(4.5*e*pi) == sin(0.5*pi*e) assert tan(4.5*e*pi) == tan(0.5*pi*e) assert cot(4.5*e*pi) == cot(0.5*pi*e) assert cos(4*o*pi) == 1 assert sin(4*o*pi) == 0 assert tan(4*o*pi) == 0 assert cot(4*o*pi) is zoo assert cos(3*o*pi) == -1 assert sin(3*o*pi) == 0 assert 
tan(3*o*pi) == 0 assert cot(3*o*pi) is zoo assert cos(4.0*o*pi) == 1 assert sin(4.0*o*pi) == 0 assert tan(4.0*o*pi) == 0 assert cot(4.0*o*pi) is zoo assert cos(3.0*o*pi) == -1 assert sin(3.0*o*pi) == 0 assert tan(3.0*o*pi) == 0 assert cot(3.0*o*pi) is zoo assert cos(4.5*o*pi) == cos(0.5*pi*o) assert sin(4.5*o*pi) == sin(0.5*pi*o) assert tan(4.5*o*pi) == tan(0.5*pi*o) assert cot(4.5*o*pi) == cot(0.5*pi*o) assert cos(4*x*pi) == cos(4*pi*x) assert sin(4*x*pi) == sin(4*pi*x) assert tan(4*x*pi) == tan(4*pi*x) assert cot(4*x*pi) == cot(4*pi*x) assert cos(3*x*pi) == cos(3*pi*x) assert sin(3*x*pi) == sin(3*pi*x) assert tan(3*x*pi) == tan(3*pi*x) assert cot(3*x*pi) == cot(3*pi*x) assert cos(4.0*x*pi) == cos(4.0*pi*x) assert sin(4.0*x*pi) == sin(4.0*pi*x) assert tan(4.0*x*pi) == tan(4.0*pi*x) assert cot(4.0*x*pi) == cot(4.0*pi*x) assert cos(3.0*x*pi) == cos(3.0*pi*x) assert sin(3.0*x*pi) == sin(3.0*pi*x) assert tan(3.0*x*pi) == tan(3.0*pi*x) assert cot(3.0*x*pi) == cot(3.0*pi*x) assert cos(4.5*x*pi) == cos(4.5*pi*x) assert sin(4.5*x*pi) == sin(4.5*pi*x) assert tan(4.5*x*pi) == tan(4.5*pi*x) assert cot(4.5*x*pi) == cot(4.5*pi*x) def test_inverses(): raises(AttributeError, lambda: sin(x).inverse()) raises(AttributeError, lambda: cos(x).inverse()) assert tan(x).inverse() == atan assert cot(x).inverse() == acot raises(AttributeError, lambda: csc(x).inverse()) raises(AttributeError, lambda: sec(x).inverse()) assert asin(x).inverse() == sin assert acos(x).inverse() == cos assert atan(x).inverse() == tan assert acot(x).inverse() == cot def test_real_imag(): a, b = symbols('a b', real=True) z = a + b*I for deep in [True, False]: assert sin( z).as_real_imag(deep=deep) == (sin(a)*cosh(b), cos(a)*sinh(b)) assert cos( z).as_real_imag(deep=deep) == (cos(a)*cosh(b), -sin(a)*sinh(b)) assert tan(z).as_real_imag(deep=deep) == (sin(2*a)/(cos(2*a) + cosh(2*b)), sinh(2*b)/(cos(2*a) + cosh(2*b))) assert cot(z).as_real_imag(deep=deep) == (-sin(2*a)/(cos(2*a) - cosh(2*b)), -sinh(2*b)/(cos(2*a) - cosh(2*b))) assert sin(a).as_real_imag(deep=deep) == (sin(a), 0) assert cos(a).as_real_imag(deep=deep) == (cos(a), 0) assert tan(a).as_real_imag(deep=deep) == (tan(a), 0) assert cot(a).as_real_imag(deep=deep) == (cot(a), 0) @XFAIL def test_sin_cos_with_infinity(): assert sin(oo) is S.NaN assert cos(oo) is S.NaN @slow def test_sincos_rewrite_sqrt(): for p in [1, 3, 5, 17]: for t in [1, 8]: n = t*p # not belonging to an `m`-gon for `m < n`(`gcd(i, n) == 1`). # For large `n` this makes the test too slow, therefore # the vertices are limited to those of index `i < 10`. 
for i in range(1, min((n + 1)//2 + 1, 10)): if 1 == gcd(i, n): x = i*pi/n s1 = sin(x).rewrite(sqrt) c1 = cos(x).rewrite(sqrt) assert not s1.has(cos, sin), "fails for %d*pi/%d" % (i, n) assert not c1.has(cos, sin), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs(sin(x.evalf(5)) - s1.evalf(2)), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs(cos(x.evalf(5)) - c1.evalf(2)), "fails for %d*pi/%d" % (i, n) assert cos(pi/14).rewrite(sqrt) == sqrt(cos(pi/7)/2 + S.Half) assert cos(pi/257).rewrite(sqrt).evalf(64) == cos(pi/257).evalf(64) assert cos(pi*Rational(-15, 2)/11, evaluate=False).rewrite( sqrt) == -sqrt(-cos(pi*Rational(4, 11))/2 + S.Half) assert cos(Mul(2, pi, S.Half, evaluate=False), evaluate=False).rewrite( sqrt) == -1 e = cos(pi/3/17) # don't use pi/15 since that is caught at instantiation a = ( -3*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17) + 17)/64 - 3*sqrt(34)*sqrt(sqrt(17) + 17)/128 - sqrt(sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - sqrt(-sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 - Rational(1, 32) + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 + 3*sqrt(2)*sqrt(sqrt(17) + 17)/128 + sqrt(34)*sqrt(-sqrt(17) + 17)/128 + 13*sqrt(2)*sqrt(-sqrt(17) + 17)/128 + sqrt(17)*sqrt(-sqrt(17) + 17)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/128 + 5*sqrt(17)/32 + sqrt(3)*sqrt(-sqrt(2)*sqrt(sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/8 - 5*sqrt(2)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 - 3*sqrt(2)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/32 + sqrt(34)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/64 + sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))/2 + S.Half + sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 32))*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + sqrt(34)*sqrt(-sqrt(17) + 17)*sqrt(sqrt(17)/32 + sqrt(2)*sqrt(-sqrt(17) + 17)/32 + sqrt(2)*sqrt(-8*sqrt(2)*sqrt(sqrt(17) + 17) - sqrt(2)*sqrt(-sqrt(17) + 17) + sqrt(34)*sqrt(-sqrt(17) + 17) + 6*sqrt(17) + 34)/32 + Rational(15, 
32))/32)/2) assert e.rewrite(sqrt) == a assert e.n() == a.n() assert cos(pi/9/17).rewrite(sqrt) == \ sin(pi/9)*sin(pi*Rational(2, 17)) + cos(pi/9)*cos(pi*Rational(2, 17)) @slow def test_tancot_rewrite_sqrt(): for p in [1, 3, 5, 17]: for t in [1, 8]: n = t*p for i in range(1, min((n + 1)//2 + 1, 10)): if 1 == gcd(i, n): x = i*pi/n if 2*i != n and 3*i != 2*n: t1 = tan(x).rewrite(sqrt) assert not t1.has(cot, tan), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs( tan(x.evalf(7)) - t1.evalf(4) ), "fails for %d*pi/%d" % (i, n) if i != 0 and i != n: c1 = cot(x).rewrite(sqrt) assert not c1.has(cot, tan), "fails for %d*pi/%d" % (i, n) assert 1e-3 > abs( cot(x.evalf(7)) - c1.evalf(4) ), "fails for %d*pi/%d" % (i, n) def test_sec(): x = symbols('x', real=True) z = symbols('z') assert sec.nargs == FiniteSet(1) assert sec(zoo) is nan assert sec(0) == 1 assert sec(pi) == -1 assert sec(pi/2) is zoo assert sec(-pi/2) is zoo assert sec(pi/6) == 2*sqrt(3)/3 assert sec(pi/3) == 2 assert sec(pi*Rational(5, 2)) is zoo assert sec(pi*Rational(9, 7)) == -sec(pi*Rational(2, 7)) assert sec(pi*Rational(3, 4)) == -sqrt(2) assert sec(I) == 1/cosh(1) assert sec(x*I) == 1/cosh(x) assert sec(-x) == sec(x) assert sec(asec(x)) == x assert sec(z).conjugate() == sec(conjugate(z)) assert (sec(z).as_real_imag() == (cos(re(z))*cosh(im(z))/(sin(re(z))**2*sinh(im(z))**2 + cos(re(z))**2*cosh(im(z))**2), sin(re(z))*sinh(im(z))/(sin(re(z))**2*sinh(im(z))**2 + cos(re(z))**2*cosh(im(z))**2))) assert sec(x).expand(trig=True) == 1/cos(x) assert sec(2*x).expand(trig=True) == 1/(2*cos(x)**2 - 1) assert sec(x).is_extended_real == True assert sec(z).is_real == None assert sec(a).is_algebraic is None assert sec(na).is_algebraic is False assert sec(x).as_leading_term() == sec(x) assert sec(0).is_finite == True assert sec(x).is_finite == None assert sec(pi/2).is_finite == False assert series(sec(x), x, x0=0, n=6) == 1 + x**2/2 + 5*x**4/24 + O(x**6) assert series(sqrt(sec(x))) == 1 + x**2/4 + 7*x**4/96 + O(x**6) assert (series(sqrt(sec(x)), x, x0=pi*3/2, n=4) == 1/sqrt(x - pi*Rational(3, 2)) + (x - pi*Rational(3, 2))**Rational(3, 2)/12 + (x - pi*Rational(3, 2))**Rational(7, 2)/160 + O((x - pi*Rational(3, 2))**4, (x, pi*Rational(3, 2)))) assert sec(x).diff(x) == tan(x)*sec(x) assert sec(z).taylor_term(4, z) == 5*z**4/24 assert sec(z).taylor_term(6, z) == 61*z**6/720 assert sec(z).taylor_term(5, z) == 0 def test_sec_rewrite(): assert sec(x).rewrite(exp) == 1/(exp(I*x)/2 + exp(-I*x)/2) assert sec(x).rewrite(cos) == 1/cos(x) assert sec(x).rewrite(tan) == (tan(x/2)**2 + 1)/(-tan(x/2)**2 + 1) assert sec(x).rewrite(pow) == sec(x) assert sec(x).rewrite(sqrt) == sec(x) assert sec(z).rewrite(cot) == (cot(z/2)**2 + 1)/(cot(z/2)**2 - 1) assert sec(x).rewrite(sin) == 1 / sin(x + pi / 2, evaluate=False) assert sec(x).rewrite(tan) == (tan(x / 2)**2 + 1) / (-tan(x / 2)**2 + 1) assert sec(x).rewrite(csc) == csc(-x + pi/2, evaluate=False) def test_sec_fdiff(): assert sec(x).fdiff() == tan(x)*sec(x) raises(ArgumentIndexError, lambda: sec(x).fdiff(2)) def test_csc(): x = symbols('x', real=True) z = symbols('z') cosecant = csc('x') alternate = 1/sin('x') assert cosecant.equals(alternate) == True assert alternate.equals(cosecant) == True assert csc.nargs == FiniteSet(1) assert csc(0) is zoo assert csc(pi) is zoo assert csc(zoo) is nan assert csc(pi/2) == 1 assert csc(-pi/2) == -1 assert csc(pi/6) == 2 assert csc(pi/3) == 2*sqrt(3)/3 assert csc(pi*Rational(5, 2)) == 1 assert csc(pi*Rational(9, 7)) == -csc(pi*Rational(2, 7)) assert csc(pi*Rational(3, 4)) == sqrt(2) 
assert csc(I) == -I/sinh(1) assert csc(x*I) == -I/sinh(x) assert csc(-x) == -csc(x) assert csc(acsc(x)) == x assert csc(z).conjugate() == csc(conjugate(z)) assert (csc(z).as_real_imag() == (sin(re(z))*cosh(im(z))/(sin(re(z))**2*cosh(im(z))**2 + cos(re(z))**2*sinh(im(z))**2), -cos(re(z))*sinh(im(z))/(sin(re(z))**2*cosh(im(z))**2 + cos(re(z))**2*sinh(im(z))**2))) assert csc(x).expand(trig=True) == 1/sin(x) assert csc(2*x).expand(trig=True) == 1/(2*sin(x)*cos(x)) assert csc(x).is_extended_real == True assert csc(z).is_real == None assert csc(a).is_algebraic is None assert csc(na).is_algebraic is False assert csc(x).as_leading_term() == csc(x) assert csc(0).is_finite == False assert csc(x).is_finite == None assert csc(pi/2).is_finite == True assert series(csc(x), x, x0=pi/2, n=6) == \ 1 + (x - pi/2)**2/2 + 5*(x - pi/2)**4/24 + O((x - pi/2)**6, (x, pi/2)) assert series(csc(x), x, x0=0, n=6) == \ 1/x + x/6 + 7*x**3/360 + 31*x**5/15120 + O(x**6) assert csc(x).diff(x) == -cot(x)*csc(x) assert csc(x).taylor_term(2, x) == 0 assert csc(x).taylor_term(3, x) == 7*x**3/360 assert csc(x).taylor_term(5, x) == 31*x**5/15120 raises(ArgumentIndexError, lambda: csc(x).fdiff(2)) def test_asec(): z = Symbol('z', zero=True) assert asec(z) is zoo assert asec(nan) is nan assert asec(1) == 0 assert asec(-1) == pi assert asec(oo) == pi/2 assert asec(-oo) == pi/2 assert asec(zoo) == pi/2 assert asec(sec(pi*Rational(13, 4))) == pi*Rational(3, 4) assert asec(1 + sqrt(5)) == pi*Rational(2, 5) assert asec(2/sqrt(3)) == pi/6 assert asec(sqrt(4 - 2*sqrt(2))) == pi/8 assert asec(-sqrt(4 + 2*sqrt(2))) == pi*Rational(5, 8) assert asec(sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(3, 10) assert asec(-sqrt(2 + 2*sqrt(5)/5)) == pi*Rational(7, 10) assert asec(sqrt(2) - sqrt(6)) == pi*Rational(11, 12) assert asec(x).diff(x) == 1/(x**2*sqrt(1 - 1/x**2)) assert asec(x).as_leading_term(x) == log(x) assert asec(x).rewrite(log) == I*log(sqrt(1 - 1/x**2) + I/x) + pi/2 assert asec(x).rewrite(asin) == -asin(1/x) + pi/2 assert asec(x).rewrite(acos) == acos(1/x) assert asec(x).rewrite(atan) == (2*atan(x + sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x assert asec(x).rewrite(acot) == (2*acot(x - sqrt(x**2 - 1)) - pi/2)*sqrt(x**2)/x assert asec(x).rewrite(acsc) == -acsc(x) + pi/2 raises(ArgumentIndexError, lambda: asec(x).fdiff(2)) def test_asec_is_real(): assert asec(S.Half).is_real is False n = Symbol('n', positive=True, integer=True) assert asec(n).is_extended_real is True assert asec(x).is_real is None assert asec(r).is_real is None t = Symbol('t', real=False, finite=True) assert asec(t).is_real is False def test_acsc(): assert acsc(nan) is nan assert acsc(1) == pi/2 assert acsc(-1) == -pi/2 assert acsc(oo) == 0 assert acsc(-oo) == 0 assert acsc(zoo) == 0 assert acsc(0) is zoo assert acsc(csc(3)) == -3 + pi assert acsc(csc(4)) == -4 + pi assert acsc(csc(6)) == 6 - 2*pi assert unchanged(acsc, csc(x)) assert unchanged(acsc, sec(x)) assert acsc(2/sqrt(3)) == pi/3 assert acsc(csc(pi*Rational(13, 4))) == -pi/4 assert acsc(sqrt(2 + 2*sqrt(5)/5)) == pi/5 assert acsc(-sqrt(2 + 2*sqrt(5)/5)) == -pi/5 assert acsc(-2) == -pi/6 assert acsc(-sqrt(4 + 2*sqrt(2))) == -pi/8 assert acsc(sqrt(4 - 2*sqrt(2))) == pi*Rational(3, 8) assert acsc(1 + sqrt(5)) == pi/10 assert acsc(sqrt(2) - sqrt(6)) == pi*Rational(-5, 12) assert acsc(x).diff(x) == -1/(x**2*sqrt(1 - 1/x**2)) assert acsc(x).as_leading_term(x) == log(x) assert acsc(x).rewrite(log) == -I*log(sqrt(1 - 1/x**2) + I/x) assert acsc(x).rewrite(asin) == asin(1/x) assert acsc(x).rewrite(acos) == -acos(1/x) + pi/2 assert 
acsc(x).rewrite(atan) == (-atan(sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x assert acsc(x).rewrite(acot) == (-acot(1/sqrt(x**2 - 1)) + pi/2)*sqrt(x**2)/x assert acsc(x).rewrite(asec) == -asec(x) + pi/2 raises(ArgumentIndexError, lambda: acsc(x).fdiff(2)) def test_csc_rewrite(): assert csc(x).rewrite(pow) == csc(x) assert csc(x).rewrite(sqrt) == csc(x) assert csc(x).rewrite(exp) == 2*I/(exp(I*x) - exp(-I*x)) assert csc(x).rewrite(sin) == 1/sin(x) assert csc(x).rewrite(tan) == (tan(x/2)**2 + 1)/(2*tan(x/2)) assert csc(x).rewrite(cot) == (cot(x/2)**2 + 1)/(2*cot(x/2)) assert csc(x).rewrite(cos) == 1/cos(x - pi/2, evaluate=False) assert csc(x).rewrite(sec) == sec(-x + pi/2, evaluate=False) assert csc(1 - exp(-besselj(I, I))).rewrite(cos) == \ -1/cos(-pi/2 - 1 + cos(I*besselj(I, I)) + I*cos(-pi/2 + I*besselj(I, I), evaluate=False), evaluate=False) def test_issue_8653(): n = Symbol('n', integer=True) assert sin(n).is_irrational is None assert cos(n).is_irrational is None assert tan(n).is_irrational is None def test_issue_9157(): n = Symbol('n', integer=True, positive=True) assert atan(n - 1).is_nonnegative is True def test_trig_period(): x, y = symbols('x, y') assert sin(x).period() == 2*pi assert cos(x).period() == 2*pi assert tan(x).period() == pi assert cot(x).period() == pi assert sec(x).period() == 2*pi assert csc(x).period() == 2*pi assert sin(2*x).period() == pi assert cot(4*x - 6).period() == pi/4 assert cos((-3)*x).period() == pi*Rational(2, 3) assert cos(x*y).period(x) == 2*pi/abs(y) assert sin(3*x*y + 2*pi).period(y) == 2*pi/abs(3*x) assert tan(3*x).period(y) is S.Zero raises(NotImplementedError, lambda: sin(x**2).period(x)) def test_issue_7171(): assert sin(x).rewrite(sqrt) == sin(x) assert sin(x).rewrite(pow) == sin(x) def test_issue_11864(): w, k = symbols('w, k', real=True) F = Piecewise((1, Eq(2*pi*k, 0)), (sin(pi*k)/(pi*k), True)) soln = Piecewise((1, Eq(2*pi*k, 0)), (sinc(pi*k), True)) assert F.rewrite(sinc) == soln def test_real_assumptions(): z = Symbol('z', real=False, finite=True) assert sin(z).is_real is None assert cos(z).is_real is None assert tan(z).is_real is False assert sec(z).is_real is None assert csc(z).is_real is None assert cot(z).is_real is False assert asin(p).is_real is None assert asin(n).is_real is None assert asec(p).is_real is None assert asec(n).is_real is None assert acos(p).is_real is None assert acos(n).is_real is None assert acsc(p).is_real is None assert acsc(n).is_real is None assert atan(p).is_positive is True assert atan(n).is_negative is True assert acot(p).is_positive is True assert acot(n).is_negative is True def test_issue_14320(): assert asin(sin(2)) == -2 + pi and (-pi/2 <= -2 + pi <= pi/2) and sin(2) == sin(-2 + pi) assert asin(cos(2)) == -2 + pi/2 and (-pi/2 <= -2 + pi/2 <= pi/2) and cos(2) == sin(-2 + pi/2) assert acos(sin(2)) == -pi/2 + 2 and (0 <= -pi/2 + 2 <= pi) and sin(2) == cos(-pi/2 + 2) assert acos(cos(20)) == -6*pi + 20 and (0 <= -6*pi + 20 <= pi) and cos(20) == cos(-6*pi + 20) assert acos(cos(30)) == -30 + 10*pi and (0 <= -30 + 10*pi <= pi) and cos(30) == cos(-30 + 10*pi) assert atan(tan(17)) == -5*pi + 17 and (-pi/2 < -5*pi + 17 < pi/2) and tan(17) == tan(-5*pi + 17) assert atan(tan(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 < pi/2) and tan(15) == tan(-5*pi + 15) assert atan(cot(12)) == -12 + pi*Rational(7, 2) and (-pi/2 < -12 + pi*Rational(7, 2) < pi/2) and cot(12) == tan(-12 + pi*Rational(7, 2)) assert acot(cot(15)) == -5*pi + 15 and (-pi/2 < -5*pi + 15 <= pi/2) and cot(15) == cot(-5*pi + 15) assert acot(tan(19)) == -19 + 
pi*Rational(13, 2) and (-pi/2 < -19 + pi*Rational(13, 2) <= pi/2) and tan(19) == cot(-19 + pi*Rational(13, 2)) assert asec(sec(11)) == -11 + 4*pi and (0 <= -11 + 4*pi <= pi) and cos(11) == cos(-11 + 4*pi) assert asec(csc(13)) == -13 + pi*Rational(9, 2) and (0 <= -13 + pi*Rational(9, 2) <= pi) and sin(13) == cos(-13 + pi*Rational(9, 2)) assert acsc(csc(14)) == -4*pi + 14 and (-pi/2 <= -4*pi + 14 <= pi/2) and sin(14) == sin(-4*pi + 14) assert acsc(sec(10)) == pi*Rational(-7, 2) + 10 and (-pi/2 <= pi*Rational(-7, 2) + 10 <= pi/2) and cos(10) == sin(pi*Rational(-7, 2) + 10) def test_issue_14543(): assert sec(2*pi + 11) == sec(11) assert sec(2*pi - 11) == sec(11) assert sec(pi + 11) == -sec(11) assert sec(pi - 11) == -sec(11) assert csc(2*pi + 17) == csc(17) assert csc(2*pi - 17) == -csc(17) assert csc(pi + 17) == -csc(17) assert csc(pi - 17) == csc(17) x = Symbol('x') assert csc(pi/2 + x) == sec(x) assert csc(pi/2 - x) == sec(x) assert csc(pi*Rational(3, 2) + x) == -sec(x) assert csc(pi*Rational(3, 2) - x) == -sec(x) assert sec(pi/2 - x) == csc(x) assert sec(pi/2 + x) == -csc(x) assert sec(pi*Rational(3, 2) + x) == csc(x) assert sec(pi*Rational(3, 2) - x) == -csc(x)
true
true
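Note on the record above: its content field is SymPy's inverse-trigonometric test suite, which exercises the rewrite machinery for asin, acos, atan, atan2, acot, asec and acsc. The short sketch below reproduces a few of those rewrites interactively so the tested behaviour can be seen in isolation; it assumes only a standard SymPy installation and prints results rather than asserting structural equality, since the exact returned forms can vary between SymPy versions.

# Minimal sketch: a few of the rewrites asserted in the test content above.
# Assumes a standard SymPy installation; printed forms may differ slightly
# across SymPy versions, which is why nothing is asserted here.
from sympy import asin, atan, atan2, diff, log, symbols

x, y = symbols('x y')

# test_asin_rewrite asserts asin(x).rewrite(log) == -I*log(I*x + sqrt(1 - x**2))
print(asin(x).rewrite(log))

# ... and asin(x).rewrite(atan) == 2*atan(x/(1 + sqrt(1 - x**2)))
print(asin(x).rewrite(atan))

# test_atan2 asserts atan2(y, x).rewrite(log) == -I*log((x + I*y)/sqrt(x**2 + y**2))
print(atan2(y, x).rewrite(log))

# The partial derivatives reduce to the familiar closed forms:
print(diff(atan2(y, x), x))   # -y/(x**2 + y**2)
print(diff(atan2(y, x), y))   # x/(x**2 + y**2)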
f70126b5191645e11ab83f366d3774b7a8cd4bef
27829
py
Python
contrib/fsl_integration/sct_plugin.py
kousu-1/spinalcordtoolbox
9b1c2179fe31be489dab7f08c43e9bd5902931c0
[ "MIT" ]
null
null
null
contrib/fsl_integration/sct_plugin.py
kousu-1/spinalcordtoolbox
9b1c2179fe31be489dab7f08c43e9bd5902931c0
[ "MIT" ]
null
null
null
contrib/fsl_integration/sct_plugin.py
kousu-1/spinalcordtoolbox
9b1c2179fe31be489dab7f08c43e9bd5902931c0
[ "MIT" ]
null
null
null
# coding=utf-8 ######################################################################################### # This code provides SCT integration into FSLeyes for the following tools: # # - sct_propseg # - sct_deepseg_gm # - sct_deepseg_sc # - sct_label_vertebrae # - sct_register_to_template # - sct_process_segmentation # - sct_dmri_moco # - sct_dmri_compute_dti # # # --------------------------------------------------------------------------------------- # Copyright (c) 2018 Polytechnique Montreal <www.neuro.polymtl.ca> # Authors: Christian S. Perone, Thiago JR Rezende, Julien Cohen-Adad ########################################################################################## # TODO: add keyboard shortcuts to Run (ctrl+R) # TODO: add help when user leaves cursor on button import os import select import subprocess import signal from threading import Thread import logging import webbrowser import wx import wx.lib.agw.aui as aui import wx.html as html logger = logging.getLogger(__name__) aui_manager = frame.getAuiManager() # from FSLeyes context class ErrorDialog(wx.Dialog): """ Panel to display if there is an error, instructing user what to do. """ def __init__(self, parent, msg=None): wx.Dialog.__init__(self, parent, title="An Error Occurred") self.SetSize((600, 275)) if msg is None: msg = "An error has occurred while running SCT. Please go to the Terminal, copy all the content and paste it as a new issue in SCT's forum: \ http://forum.spinalcordmri.org/" vbox = wx.BoxSizer(wx.VERTICAL) error_msg_box = wx.TextCtrl(self, wx.ID_ANY, size=(500,150), style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL) error_msg_box.AppendText(msg) vbox.Add(error_msg_box, 0, wx.TOP|wx.EXPAND, 20) btns = self.CreateSeparatedButtonSizer(wx.OK) vbox.Add(btns, 0, wx.CENTER|wx.ALL, 10) hbox = wx.BoxSizer(wx.HORIZONTAL) save_ico = wx.ArtProvider.GetBitmap(wx.ART_ERROR, wx.ART_TOOLBAR, (50, 50)) img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight())) hbox.Add(img_info, 0, wx.ALL, 20) hbox.Add(vbox, 0, wx.ALL, 0) self.SetSizer(hbox) self.Centre() self.CenterOnParent() class ProgressDialog(wx.Dialog): """ Panel to display while running SCT command. """ def __init__(self, parent): self.stop_run = False wx.Dialog.__init__(self, parent, title="SCT Processing") self.SetSize((300, 120)) vbox = wx.BoxSizer(wx.VERTICAL) lbldesc = wx.StaticText(self, id=wx.ID_ANY, label="Processing, please wait...") vbox.Add(lbldesc, 0, wx.ALIGN_CENTER|wx.ALL, 10) stop_button = wx.Button(self, wx.ID_CANCEL, 'Stop') vbox.Add(stop_button, 0, wx.CENTER|wx.ALL, 10) hbox = wx.BoxSizer(wx.HORIZONTAL) # TODO: use a nicer image, showing two gears (similar to ID_EXECUTE) save_ico = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_TOOLBAR, (50, 50)) img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight())) hbox.Add(img_info, 0, wx.ALL, 10) hbox.Add(vbox, 0, wx.ALL, 0) self.SetSizer(hbox) self.Centre() self.CenterOnParent() stop_button.Bind(wx.EVT_BUTTON, self.OnStop) def OnStop(self, event): print(f"Stop was pressed. 
event={event}") self.stop_run = True self.Destroy() class SCTCallThread(Thread): def __init__(self, command, text_window_ctrl): Thread.__init__(self) self.command = [command] self.status = None self.stdout = "" self.stderr = "" self.text_window = text_window_ctrl def sct_call(self, command): env = os.environ.copy() if 'PYTHONHOME' in env: del env["PYTHONHOME"] if 'PYTHONPATH' in env: del env["PYTHONPATH"] proc = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env) self.p = proc stdout_fd = proc.stdout.fileno() stderr_fd = proc.stderr.fileno() os.set_blocking(stdout_fd, False) os.set_blocking(stderr_fd, False) while proc.poll() is None: timeout = 1 rs = [ proc.stdout, proc.stderr ] ws = [] xs = [] rs, ws, xs = select.select(rs, ws, xs, timeout) for r in rs: msg = None if r is proc.stdout: msg = os.read(stdout_fd, 1024) if msg: self.stdout += msg.decode('utf-8') elif r is proc.stderr: msg = os.read(stderr_fd, 1024) if msg: self.stderr += msg.decode('utf-8') if msg: wx.CallAfter(self.text_window.WriteText, msg) return proc.returncode, self.stdout, self.stderr def sct_interrupt(self): if self.p: self.p.send_signal(signal.SIGINT) else: print("No process running?") def run(self): """ overrides Thread.run() function :return: """ self.status, self.stdout, self.stderr = self.sct_call(self.command) class TextBox: """ Create a horizontal box composed of a button (left) and a text box (right). When the button is pressed, the file name highlighted in the list of overlay is fetched and passed into the text box. This file name can be accessed by: TextBox.textctrl.GetValue() """ def __init__(self, sctpanel, label=""): """ :param sctpanel: SCTPanel Class :param label: Label to display on the button """ self.textctrl = wx.TextCtrl(sctpanel) self.hbox = wx.BoxSizer(wx.HORIZONTAL) button_fetch_file = wx.Button(sctpanel, -1, label=label) button_fetch_file.Bind(wx.EVT_BUTTON, self.get_highlighted_file_name) self.hbox.Add(button_fetch_file, 0, wx.ALIGN_LEFT| wx.RIGHT, 10) self.hbox.Add(self.textctrl, 1, wx.ALIGN_LEFT|wx.LEFT, 10) def get_highlighted_file_name(self, event): """ Fetch path to file highlighted in the Overlay list. """ selected_overlay = displayCtx.getSelectedOverlay() # displayCtx is a class from FSLeyes filename_path = selected_overlay.dataSource print("Fetched file name: {}".format(filename_path)) self.textctrl.SetValue(filename_path) def get_file_name(self): return self.textctrl.GetValue() # Creates the standard panel for each tool class SCTPanel(wx.Panel): """ Creates the standard panel for each tool :param sizer_h: Main wx.BoxSizer object that encloses SCT information, for each panel """ DESCRIPTION_SCT = """ <br><br><b>General citation (please always cite)</b>:<br> De Leener B, Levy S, Dupont SM, Fonov VS, Stikov N, Louis Collins D, Callot V, Cohen-Adad J. <i>SCT: Spinal Cord Toolbox, an open-source software for processing spinal cord MRI data</i>. Neuroimage. 2017 Jan 15;145(Pt A):24-43. 
""" SCT_DIR_ENV = 'SCT_DIR' SCT_LOGO_REL_PATH = 'documentation/imgs/logo_sct_small.png' SCT_TUTORIAL_PATH = 'documentation/Manual_v1_SCT.pdf' # TODO: fix this path def __init__(self, parent, id_): super(SCTPanel, self).__init__(parent=parent, id=id_) # main layout consists of one row with 3 main columns self.main_row = wx.BoxSizer(wx.HORIZONTAL) self.column_left = wx.BoxSizer(wx.VERTICAL) self.column_center = wx.BoxSizer(wx.VERTICAL) self.column_right = wx.BoxSizer(wx.VERTICAL) sct_logo = self.get_logo() logo_help_hbox = wx.BoxSizer(wx.HORIZONTAL) logo_help_hbox.Add(sct_logo, 1, wx.HORIZONTAL, 5) button_help = wx.Button(self, id=id_, label="Help") button_help.Bind(wx.EVT_BUTTON, self.help_url) logo_help_hbox.Add(button_help, 0, wx.ALIGN_BOTTOM|wx.LEFT, 90) self.column_left.Add(logo_help_hbox, proportion=0, flag=wx.ALL, border=5) html_desc_window = self.get_description() self.column_left.Add(html_desc_window, 0, wx.ALL, 5) self.log_window = wx.TextCtrl(self, wx.ID_ANY, size=(100, 300), style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL) self.column_right.Add(self.log_window, 1, wx.EXPAND|wx.ALL, 5) self.main_row.Add(self.column_left, 0, wx.ALL, 10) self.main_row.Add(self.column_center, 1, wx.ALL, 10) self.main_row.Add(self.column_right, 1, wx.ALL, 10) self.SetSizerAndFit(self.main_row) def log_to_window(self, msg, level=None): if level is None: self.log_window.AppendText("{}\n".format(msg)) else: self.log_window.AppendText("{}: {}\n".format(level, msg)) def tutorial(self,event): pdfpath = os.path.join(os.environ[self.SCT_DIR_ENV],self.SCT_TUTORIAL_PATH) print('PDF path:', pdfpath) cmd_line = "open {}".format(pdfpath) print('Command line:', cmd_line) self.call_sct_command(cmd_line) def help_url(self, event): url = "http://forum.spinalcordmri.org/c/sct" webbrowser.open(url) def get_logo(self): logo_file = os.path.join(os.environ[self.SCT_DIR_ENV], self.SCT_LOGO_REL_PATH) png = wx.Image(logo_file, wx.BITMAP_TYPE_ANY).ConvertToBitmap() img_logo = wx.StaticBitmap(self, -1, png, wx.DefaultPosition, (png.GetWidth(), png.GetHeight())) return img_logo def get_description(self): txt_style = wx.VSCROLL | \ wx.HSCROLL | wx.TE_READONLY | \ wx.BORDER_SIMPLE htmlw = html.HtmlWindow(self, wx.ID_ANY, size=(400, 220), style=txt_style) htmlw.SetPage(self.DESCRIPTION + self.DESCRIPTION_SCT) htmlw.SetStandardFonts(size=10, normal_face="Noto Sans") return htmlw def call_sct_command(self, command): self.log_to_window("Running: {}".format(command), level="INFO") progress_dialog = ProgressDialog(frame) progress_dialog.Show() thr = SCTCallThread(command, self.log_window) thr.start() # No access to app.pending() from here while True: thr.join(0.1) wx.Yield() if not thr.isAlive(): break if progress_dialog.stop_run: thr.sct_interrupt() thr.join() self.log_to_window("Command completed.", level="INFO") if progress_dialog: progress_dialog.Destroy() # show stderr output if an error occurred if thr.status: self.log_to_window("An error occurred", level="ERROR") error_dialog = ErrorDialog(frame, msg=thr.stderr) error_dialog.Show() class TabPanelPropSeg(SCTPanel): """ sct_propseg """ DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord using a deformable 3D mesh. This method is fast and robust, but could be prone to "leaking" if the contrast between the cord and the CSF is not high enough. <br><br> <b>Usage</b>: <br> Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the appropriate contrast and click "Run". 
For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> De Leener et al. <i>Robust, accurate and fast automatic segmentation of the spinal cord.</i> Neuroimage 2014 """ def __init__(self, parent): super(TabPanelPropSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") lbl_contrasts = ['t1', 't2', 't2s', 'dwi'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_seg.{}".format(fname, fext) cmd_line = "sct_propseg -i {} -c {}".format(fname_input, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'red' class TabPanelSCSeg(SCTPanel): """ sct_deepseg_sc """ DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord using deep learning. The convolutional neural network was trained on ~1,500 subjects from multiple centers, and including various pathologies (compression, MS, ALS, etc.). <br><br> <b>Usage</b>: <br> Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Gros et al. <i>Automatic segmentation of the spinal cord and intramedullary multiple sclerosis lesions with convolutional neural networks.</i> Neuroimage 2019 """ def __init__(self, parent): super(TabPanelSCSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") lbl_contrasts = ['t1', 't2', 't2s', 'dwi'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." 
self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_seg.{}".format(fname, fext) cmd_line = "sct_deepseg_sc -i {} -c {}".format(fname_input, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'red' class TabPanelGMSeg(SCTPanel): """ sct_deepseg_gm """ DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord gray matter using deep learning. The convolutional neural network features dilated convolutions and was trained on 232 subjects (3963 axial slices) from multiple centers, and including various pathologies (compression, MS, ALS, etc.). <br><br> <b>Usage</b>: <br> Select an image from the overlay list that has a good white and gray matter contrast (e.g., T2*-weighted image), then click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Perone et al. <i>Spinal cord gray matter segmentation using deep dilated convolutions.</i> Sci Rep. 2018 """ def __init__(self, parent): super(TabPanelGMSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_gmseg.{}".format(fname, fext) cmd_line = "sct_deepseg_gm -i {} -o {}".format(fname_input, fname_out) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'yellow' class TabPanelVertLB(SCTPanel): """ sct_label_vertebrae """ DESCRIPTION = """ <b>Function description</b>:<br> Automatically find intervertebral discs and label an input segmentation with vertebral levels. The values on the output labeled segmentation corresponds to the level, e.g., 2 corresponds to C2, 8 corresponds to T1, etc. <br><br> <b>Usage</b>: <br> Select an image from the overlay list where discs are clearly visible (e.g., T1w or T2w scans are usually good for this task). Then, select a segmentation associated with the image, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Ullmann et al. 
<i>Automatic labeling of vertebral levels using a robust template-based approach.</i> Int J Biomed Imaging 2014 """ def __init__(self, parent): super(TabPanelVertLB, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_im = TextBox(self, label="Input image") self.hbox_seg = TextBox(self, label="Input segmentation") lbl_contrasts = ['t1', 't2'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) # Run button button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_im = self.hbox_im.get_file_name() if not fname_im: msg = "No input image selected! Select an image from the overlay list and then press Input image." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_seg = self.hbox_seg.get_file_name() if not fname_seg: msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_seg) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_labeled.{}".format(fname, fext) cmd_line = "sct_label_vertebrae -i {} -s {} -c {}".format(fname_im, fname_seg, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'subcortical' class TabPanelRegisterToTemplate(SCTPanel): """ sct_register_to_template """ DESCRIPTION = """ <b>Function description</b>:<br> Register an image with the default PAM50 spinal cord MRI template. <br><br> <b>Usage</b>: <br> Select an image, its segmentation and a label file. The label file contains single-pixel labels located at the posterior edge of the intervertebral discs. The value of the label corresponds to the lower vertebrae, e.g., label 3 corresponds to the C2-C3 disc. This label file can be created within FSLeyes by clicking on Tools > Edit mode, then Edit > Create mask. Select the "pen", adjust the size to one pixel width and select the proper label value, then click on the image and save the label(s): Overlay > save. Then, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> De Leener et al. 
<i>PAM50: Unbiased multimodal template of the brainstem and spinal cord aligned with the ICBM152 space.</i> Neuroimage 2017 """ def __init__(self, parent): super(TabPanelRegisterToTemplate, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_im = TextBox(self, label="Input image") self.hbox_seg = TextBox(self, label="Input segmentation") self.hbox_label = TextBox(self, label="Input labels") lbl_contrasts = ['t1', 't2'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_label.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_label = self.hbox_label.textctrl.GetValue() fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_im = self.hbox_im.get_file_name() if not fname_im: msg = "No input image selected! Select an image from the overlay list and then press Input image." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_seg = self.hbox_seg.get_file_name() if not fname_seg: msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_label = self.hbox_label.get_file_name() if not fname_label: msg = "No input labels selected! Select input labels from the overlay list and then press Input labels." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() cmd_line = \ "sct_register_to_template -i {} -s {} -ldisc {} -c {}".format(fname_im, fname_seg, fname_label, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay base_name = os.path.basename(fname_im) fname, fext = base_name.split(os.extsep, 1) # TODO: at some point we will modify SCT's function to output the file name below # fname_out = "PAM50_{}_reg.{}".format(contrast, fext) fname_out = 'template2anat.nii.gz' image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'gray' def run_main(): window = aui_manager.GetManagedWindow() if 'SCT_DIR' not in os.environ: dlg = wx.MessageDialog(window, 'Spinal Cord Toolbox (SCT) was not ' 'found in your system. Make sure you open fsleyes ' 'from the Terminal (not by clicking on the App). 
' 'If you are indeed running from the Terminal, please ' 'check the installation procedure at: ' 'https://github.com/neuropoly/spinalcordtoolbox', 'SCT not found!', wx.OK | wx.ICON_INFORMATION) dlg.ShowModal() dlg.Destroy() return # Adding panels notebook = aui.AuiNotebook(parent=window) panel_propseg = TabPanelPropSeg(parent=notebook) panel_sc = TabPanelSCSeg(parent=notebook) panel_gm = TabPanelGMSeg(parent=notebook) panel_vlb = TabPanelVertLB(parent=notebook) panel_reg = TabPanelRegisterToTemplate(parent=notebook) notebook.AddPage(page=panel_propseg, caption="sct_propseg", select=True) notebook.AddPage(page=panel_sc, caption="sct_deepseg_sc", select=False) notebook.AddPage(page=panel_gm, caption="sct_deepseg_gm", select=False) notebook.AddPage(page=panel_vlb, caption="sct_label_vertebrae", select=False) notebook.AddPage(page=panel_reg, caption="sct_register_to_template", select=False) aui_manager.AddPane(notebook, aui.AuiPaneInfo().Name("notebook_content").CenterPane().PaneBorder(False)) aui_manager.Update() run_main()
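Note on the content field above: SCTCallThread streams subprocess output into the FSLeyes log window by putting the child's stdout and stderr pipes into non-blocking mode and polling them with select. The sketch below isolates that I/O pattern outside of wxPython; the 'ls -l' command is only a placeholder for illustration and, like the plugin code, the sketch assumes POSIX pipe semantics.

# Stand-alone sketch of the non-blocking output loop used by SCTCallThread above.
# 'ls -l' is a placeholder command; it is not part of the plugin.
import os
import select
import subprocess

def stream_command(command, chunk_size=1024, timeout=1.0):
    """Run `command` in a shell and yield decoded chunks of stdout/stderr as they arrive."""
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            shell=True)
    streams = [proc.stdout, proc.stderr]
    for stream in streams:
        os.set_blocking(stream.fileno(), False)   # same trick as in sct_call()
    while proc.poll() is None:
        readable, _, _ = select.select(streams, [], [], timeout)
        for stream in readable:
            chunk = os.read(stream.fileno(), chunk_size)
            if chunk:
                # The plugin forwards each chunk to the wx log window via wx.CallAfter.
                yield chunk.decode('utf-8')

if __name__ == '__main__':
    for text in stream_command('ls -l'):
        print(text, end='')

In the plugin itself this loop runs on a worker thread, while call_sct_command() keeps the GUI responsive by repeatedly calling thr.join(0.1) and wx.Yield() until the thread finishes, or interrupts the child with SIGINT when the Stop button sets stop_run.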
38.705146
153
0.622839
import os import select import subprocess import signal from threading import Thread import logging import webbrowser import wx import wx.lib.agw.aui as aui import wx.html as html logger = logging.getLogger(__name__) aui_manager = frame.getAuiManager() class ErrorDialog(wx.Dialog): def __init__(self, parent, msg=None): wx.Dialog.__init__(self, parent, title="An Error Occurred") self.SetSize((600, 275)) if msg is None: msg = "An error has occurred while running SCT. Please go to the Terminal, copy all the content and paste it as a new issue in SCT's forum: \ http://forum.spinalcordmri.org/" vbox = wx.BoxSizer(wx.VERTICAL) error_msg_box = wx.TextCtrl(self, wx.ID_ANY, size=(500,150), style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL) error_msg_box.AppendText(msg) vbox.Add(error_msg_box, 0, wx.TOP|wx.EXPAND, 20) btns = self.CreateSeparatedButtonSizer(wx.OK) vbox.Add(btns, 0, wx.CENTER|wx.ALL, 10) hbox = wx.BoxSizer(wx.HORIZONTAL) save_ico = wx.ArtProvider.GetBitmap(wx.ART_ERROR, wx.ART_TOOLBAR, (50, 50)) img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight())) hbox.Add(img_info, 0, wx.ALL, 20) hbox.Add(vbox, 0, wx.ALL, 0) self.SetSizer(hbox) self.Centre() self.CenterOnParent() class ProgressDialog(wx.Dialog): def __init__(self, parent): self.stop_run = False wx.Dialog.__init__(self, parent, title="SCT Processing") self.SetSize((300, 120)) vbox = wx.BoxSizer(wx.VERTICAL) lbldesc = wx.StaticText(self, id=wx.ID_ANY, label="Processing, please wait...") vbox.Add(lbldesc, 0, wx.ALIGN_CENTER|wx.ALL, 10) stop_button = wx.Button(self, wx.ID_CANCEL, 'Stop') vbox.Add(stop_button, 0, wx.CENTER|wx.ALL, 10) hbox = wx.BoxSizer(wx.HORIZONTAL) # TODO: use a nicer image, showing two gears (similar to ID_EXECUTE) save_ico = wx.ArtProvider.GetBitmap(wx.ART_INFORMATION, wx.ART_TOOLBAR, (50, 50)) img_info = wx.StaticBitmap(self, -1, save_ico, wx.DefaultPosition, (save_ico.GetWidth(), save_ico.GetHeight())) hbox.Add(img_info, 0, wx.ALL, 10) hbox.Add(vbox, 0, wx.ALL, 0) self.SetSizer(hbox) self.Centre() self.CenterOnParent() stop_button.Bind(wx.EVT_BUTTON, self.OnStop) def OnStop(self, event): print(f"Stop was pressed. 
event={event}") self.stop_run = True self.Destroy() class SCTCallThread(Thread): def __init__(self, command, text_window_ctrl): Thread.__init__(self) self.command = [command] self.status = None self.stdout = "" self.stderr = "" self.text_window = text_window_ctrl def sct_call(self, command): env = os.environ.copy() if 'PYTHONHOME' in env: del env["PYTHONHOME"] if 'PYTHONPATH' in env: del env["PYTHONPATH"] proc = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=env) self.p = proc stdout_fd = proc.stdout.fileno() stderr_fd = proc.stderr.fileno() os.set_blocking(stdout_fd, False) os.set_blocking(stderr_fd, False) while proc.poll() is None: timeout = 1 rs = [ proc.stdout, proc.stderr ] ws = [] xs = [] rs, ws, xs = select.select(rs, ws, xs, timeout) for r in rs: msg = None if r is proc.stdout: msg = os.read(stdout_fd, 1024) if msg: self.stdout += msg.decode('utf-8') elif r is proc.stderr: msg = os.read(stderr_fd, 1024) if msg: self.stderr += msg.decode('utf-8') if msg: wx.CallAfter(self.text_window.WriteText, msg) return proc.returncode, self.stdout, self.stderr def sct_interrupt(self): if self.p: self.p.send_signal(signal.SIGINT) else: print("No process running?") def run(self): self.status, self.stdout, self.stderr = self.sct_call(self.command) class TextBox: def __init__(self, sctpanel, label=""): self.textctrl = wx.TextCtrl(sctpanel) self.hbox = wx.BoxSizer(wx.HORIZONTAL) button_fetch_file = wx.Button(sctpanel, -1, label=label) button_fetch_file.Bind(wx.EVT_BUTTON, self.get_highlighted_file_name) self.hbox.Add(button_fetch_file, 0, wx.ALIGN_LEFT| wx.RIGHT, 10) self.hbox.Add(self.textctrl, 1, wx.ALIGN_LEFT|wx.LEFT, 10) def get_highlighted_file_name(self, event): selected_overlay = displayCtx.getSelectedOverlay() # displayCtx is a class from FSLeyes filename_path = selected_overlay.dataSource print("Fetched file name: {}".format(filename_path)) self.textctrl.SetValue(filename_path) def get_file_name(self): return self.textctrl.GetValue() # Creates the standard panel for each tool class SCTPanel(wx.Panel): DESCRIPTION_SCT = """ <br><br><b>General citation (please always cite)</b>:<br> De Leener B, Levy S, Dupont SM, Fonov VS, Stikov N, Louis Collins D, Callot V, Cohen-Adad J. <i>SCT: Spinal Cord Toolbox, an open-source software for processing spinal cord MRI data</i>. Neuroimage. 2017 Jan 15;145(Pt A):24-43. 
""" SCT_DIR_ENV = 'SCT_DIR' SCT_LOGO_REL_PATH = 'documentation/imgs/logo_sct_small.png' SCT_TUTORIAL_PATH = 'documentation/Manual_v1_SCT.pdf' # TODO: fix this path def __init__(self, parent, id_): super(SCTPanel, self).__init__(parent=parent, id=id_) # main layout consists of one row with 3 main columns self.main_row = wx.BoxSizer(wx.HORIZONTAL) self.column_left = wx.BoxSizer(wx.VERTICAL) self.column_center = wx.BoxSizer(wx.VERTICAL) self.column_right = wx.BoxSizer(wx.VERTICAL) sct_logo = self.get_logo() logo_help_hbox = wx.BoxSizer(wx.HORIZONTAL) logo_help_hbox.Add(sct_logo, 1, wx.HORIZONTAL, 5) button_help = wx.Button(self, id=id_, label="Help") button_help.Bind(wx.EVT_BUTTON, self.help_url) logo_help_hbox.Add(button_help, 0, wx.ALIGN_BOTTOM|wx.LEFT, 90) self.column_left.Add(logo_help_hbox, proportion=0, flag=wx.ALL, border=5) html_desc_window = self.get_description() self.column_left.Add(html_desc_window, 0, wx.ALL, 5) self.log_window = wx.TextCtrl(self, wx.ID_ANY, size=(100, 300), style = wx.TE_MULTILINE|wx.TE_READONLY|wx.HSCROLL) self.column_right.Add(self.log_window, 1, wx.EXPAND|wx.ALL, 5) self.main_row.Add(self.column_left, 0, wx.ALL, 10) self.main_row.Add(self.column_center, 1, wx.ALL, 10) self.main_row.Add(self.column_right, 1, wx.ALL, 10) self.SetSizerAndFit(self.main_row) def log_to_window(self, msg, level=None): if level is None: self.log_window.AppendText("{}\n".format(msg)) else: self.log_window.AppendText("{}: {}\n".format(level, msg)) def tutorial(self,event): pdfpath = os.path.join(os.environ[self.SCT_DIR_ENV],self.SCT_TUTORIAL_PATH) print('PDF path:', pdfpath) cmd_line = "open {}".format(pdfpath) print('Command line:', cmd_line) self.call_sct_command(cmd_line) def help_url(self, event): url = "http://forum.spinalcordmri.org/c/sct" webbrowser.open(url) def get_logo(self): logo_file = os.path.join(os.environ[self.SCT_DIR_ENV], self.SCT_LOGO_REL_PATH) png = wx.Image(logo_file, wx.BITMAP_TYPE_ANY).ConvertToBitmap() img_logo = wx.StaticBitmap(self, -1, png, wx.DefaultPosition, (png.GetWidth(), png.GetHeight())) return img_logo def get_description(self): txt_style = wx.VSCROLL | \ wx.HSCROLL | wx.TE_READONLY | \ wx.BORDER_SIMPLE htmlw = html.HtmlWindow(self, wx.ID_ANY, size=(400, 220), style=txt_style) htmlw.SetPage(self.DESCRIPTION + self.DESCRIPTION_SCT) htmlw.SetStandardFonts(size=10, normal_face="Noto Sans") return htmlw def call_sct_command(self, command): self.log_to_window("Running: {}".format(command), level="INFO") progress_dialog = ProgressDialog(frame) progress_dialog.Show() thr = SCTCallThread(command, self.log_window) thr.start() # No access to app.pending() from here while True: thr.join(0.1) wx.Yield() if not thr.isAlive(): break if progress_dialog.stop_run: thr.sct_interrupt() thr.join() self.log_to_window("Command completed.", level="INFO") if progress_dialog: progress_dialog.Destroy() # show stderr output if an error occurred if thr.status: self.log_to_window("An error occurred", level="ERROR") error_dialog = ErrorDialog(frame, msg=thr.stderr) error_dialog.Show() class TabPanelPropSeg(SCTPanel): DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord using a deformable 3D mesh. This method is fast and robust, but could be prone to "leaking" if the contrast between the cord and the CSF is not high enough. <br><br> <b>Usage</b>: <br> Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the appropriate contrast and click "Run". 
For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> De Leener et al. <i>Robust, accurate and fast automatic segmentation of the spinal cord.</i> Neuroimage 2014 """ def __init__(self, parent): super(TabPanelPropSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") lbl_contrasts = ['t1', 't2', 't2s', 'dwi'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_seg.{}".format(fname, fext) cmd_line = "sct_propseg -i {} -c {}".format(fname_input, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'red' class TabPanelSCSeg(SCTPanel): DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord using deep learning. The convolutional neural network was trained on ~1,500 subjects from multiple centers, and including various pathologies (compression, MS, ALS, etc.). <br><br> <b>Usage</b>: <br> Select an image from the overlay list, then click the "Input file" button to fetch the file name. Then, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Gros et al. <i>Automatic segmentation of the spinal cord and intramedullary multiple sclerosis lesions with convolutional neural networks.</i> Neuroimage 2019 """ def __init__(self, parent): super(TabPanelSCSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") lbl_contrasts = ['t1', 't2', 't2s', 'dwi'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." 
self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_seg.{}".format(fname, fext) cmd_line = "sct_deepseg_sc -i {} -c {}".format(fname_input, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'red' class TabPanelGMSeg(SCTPanel): DESCRIPTION = """ <b>Function description</b>:<br> Segment the spinal cord gray matter using deep learning. The convolutional neural network features dilated convolutions and was trained on 232 subjects (3963 axial slices) from multiple centers, and including various pathologies (compression, MS, ALS, etc.). <br><br> <b>Usage</b>: <br> Select an image from the overlay list that has a good white and gray matter contrast (e.g., T2*-weighted image), then click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Perone et al. <i>Spinal cord gray matter segmentation using deep dilated convolutions.</i> Sci Rep. 2018 """ def __init__(self, parent): super(TabPanelGMSeg, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_filein = TextBox(self, label="Input file") button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_filein.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_input = self.hbox_filein.get_file_name() if not fname_input: msg = "No input file selected! Select a file from the overlay list and then press Input file." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return base_name = os.path.basename(fname_input) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_gmseg.{}".format(fname, fext) cmd_line = "sct_deepseg_gm -i {} -o {}".format(fname_input, fname_out) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'yellow' class TabPanelVertLB(SCTPanel): DESCRIPTION = """ <b>Function description</b>:<br> Automatically find intervertebral discs and label an input segmentation with vertebral levels. The values on the output labeled segmentation corresponds to the level, e.g., 2 corresponds to C2, 8 corresponds to T1, etc. <br><br> <b>Usage</b>: <br> Select an image from the overlay list where discs are clearly visible (e.g., T1w or T2w scans are usually good for this task). Then, select a segmentation associated with the image, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> Ullmann et al. 
<i>Automatic labeling of vertebral levels using a robust template-based approach.</i> Int J Biomed Imaging 2014 """ def __init__(self, parent): super(TabPanelVertLB, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_im = TextBox(self, label="Input image") self.hbox_seg = TextBox(self, label="Input segmentation") lbl_contrasts = ['t1', 't2'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) # Run button button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_im = self.hbox_im.get_file_name() if not fname_im: msg = "No input image selected! Select an image from the overlay list and then press Input image." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_seg = self.hbox_seg.get_file_name() if not fname_seg: msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() base_name = os.path.basename(fname_seg) fname, fext = base_name.split(os.extsep, 1) fname_out = "{}_labeled.{}".format(fname, fext) cmd_line = "sct_label_vertebrae -i {} -s {} -c {}".format(fname_im, fname_seg, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay image = Image(fname_out) # <class 'fsl.data.image.Image'> overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'subcortical' class TabPanelRegisterToTemplate(SCTPanel): DESCRIPTION = """ <b>Function description</b>:<br> Register an image with the default PAM50 spinal cord MRI template. <br><br> <b>Usage</b>: <br> Select an image, its segmentation and a label file. The label file contains single-pixel labels located at the posterior edge of the intervertebral discs. The value of the label corresponds to the lower vertebrae, e.g., label 3 corresponds to the C2-C3 disc. This label file can be created within FSLeyes by clicking on Tools > Edit mode, then Edit > Create mask. Select the "pen", adjust the size to one pixel width and select the proper label value, then click on the image and save the label(s): Overlay > save. Then, select the appropriate contrast and click "Run". For more options, please use the Terminal version of this function. <br><br> <b>Specific citation</b>: <br> De Leener et al. 
<i>PAM50: Unbiased multimodal template of the brainstem and spinal cord aligned with the ICBM152 space.</i> Neuroimage 2017 """ def __init__(self, parent): super(TabPanelRegisterToTemplate, self).__init__(parent=parent, id_=wx.ID_ANY) self.hbox_im = TextBox(self, label="Input image") self.hbox_seg = TextBox(self, label="Input segmentation") self.hbox_label = TextBox(self, label="Input labels") lbl_contrasts = ['t1', 't2'] self.rbox_contrast = wx.RadioBox(self, label='Select contrast:', choices=lbl_contrasts, majorDimension=1, style=wx.RA_SPECIFY_ROWS) button_run = wx.Button(self, id=wx.ID_ANY, label="Run") button_run.Bind(wx.EVT_BUTTON, self.on_button_run) self.column_center.Add(self.hbox_im.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_seg.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.hbox_label.hbox, 0, wx.EXPAND|wx.ALL, 5) self.column_center.Add(self.rbox_contrast, 0, wx.ALL, 5) self.column_center.Add(button_run, 0, wx.ALL, 5) def on_button_run(self, event): # Build and run SCT command fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_label = self.hbox_label.textctrl.GetValue() fname_im = self.hbox_im.textctrl.GetValue() fname_seg = self.hbox_seg.textctrl.GetValue() fname_im = self.hbox_im.get_file_name() if not fname_im: msg = "No input image selected! Select an image from the overlay list and then press Input image." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_seg = self.hbox_seg.get_file_name() if not fname_seg: msg = "No input segmentation selected! Select a segmentation file from the overlay list and then press Input segmentation." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return fname_label = self.hbox_label.get_file_name() if not fname_label: msg = "No input labels selected! Select input labels from the overlay list and then press Input labels." self.log_to_window(msg, level="ERROR") error_dialog = ErrorDialog(frame, msg=msg) error_dialog.Show() return contrast = self.rbox_contrast.GetStringSelection() cmd_line = \ "sct_register_to_template -i {} -s {} -ldisc {} -c {}".format(fname_im, fname_seg, fname_label, contrast) self.call_sct_command(cmd_line) # Add output to the list of overlay base_name = os.path.basename(fname_im) fname, fext = base_name.split(os.extsep, 1) # TODO: at some point we will modify SCT's function to output the file name below fname_out = 'template2anat.nii.gz' image = Image(fname_out) overlayList.append(image) opts = displayCtx.getOpts(image) opts.cmap = 'gray' def run_main(): window = aui_manager.GetManagedWindow() if 'SCT_DIR' not in os.environ: dlg = wx.MessageDialog(window, 'Spinal Cord Toolbox (SCT) was not ' 'found in your system. Make sure you open fsleyes ' 'from the Terminal (not by clicking on the App). 
' 'If you are indeed running from the Terminal, please ' 'check the installation procedure at: ' 'https://github.com/neuropoly/spinalcordtoolbox', 'SCT not found!', wx.OK | wx.ICON_INFORMATION) dlg.ShowModal() dlg.Destroy() return notebook = aui.AuiNotebook(parent=window) panel_propseg = TabPanelPropSeg(parent=notebook) panel_sc = TabPanelSCSeg(parent=notebook) panel_gm = TabPanelGMSeg(parent=notebook) panel_vlb = TabPanelVertLB(parent=notebook) panel_reg = TabPanelRegisterToTemplate(parent=notebook) notebook.AddPage(page=panel_propseg, caption="sct_propseg", select=True) notebook.AddPage(page=panel_sc, caption="sct_deepseg_sc", select=False) notebook.AddPage(page=panel_gm, caption="sct_deepseg_gm", select=False) notebook.AddPage(page=panel_vlb, caption="sct_label_vertebrae", select=False) notebook.AddPage(page=panel_reg, caption="sct_register_to_template", select=False) aui_manager.AddPane(notebook, aui.AuiPaneInfo().Name("notebook_content").CenterPane().PaneBorder(False)) aui_manager.Update() run_main()
true
true
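The call_sct_command() loop in the record above polls its worker thread with thr.isAlive(); that spelling was removed in Python 3.9, and threading.Thread.is_alive() is the surviving name. A minimal standard-library sketch of the same join-in-short-slices polling pattern; the sleep worker and the 0.05 s slice are illustrative assumptions, not values from the record.

import threading
import time

# Stand-in worker; the record runs an SCT command inside SCTCallThread instead.
thr = threading.Thread(target=time.sleep, args=(0.2,))
thr.start()
while thr.is_alive():   # isAlive() no longer exists on Python 3.9+
    thr.join(0.05)      # join in short slices so a GUI event loop could stay responsive
print("worker finished")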
f70127728f34971adc084ca3467eeab69cdeec01
2,441
py
Python
huaweicloud-sdk-sms/huaweicloudsdksms/v3/model/update_task_status_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
64
2020-06-12T07:05:07.000Z
2022-03-30T03:32:50.000Z
huaweicloud-sdk-sms/huaweicloudsdksms/v3/model/update_task_status_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
11
2020-07-06T07:56:54.000Z
2022-01-11T11:14:40.000Z
huaweicloud-sdk-sms/huaweicloudsdksms/v3/model/update_task_status_response.py
huaweicloud/huaweicloud-sdk-python-v3
7a6270390fcbf192b3882bf763e7016e6026ef78
[ "Apache-2.0" ]
24
2020-06-08T11:42:13.000Z
2022-03-04T06:44:08.000Z
# coding: utf-8 import re import six from huaweicloudsdkcore.sdk_response import SdkResponse from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization class UpdateTaskStatusResponse(SdkResponse): """ Attributes: openapi_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ sensitive_list = [] openapi_types = { } attribute_map = { } def __init__(self): """UpdateTaskStatusResponse - a model defined in huaweicloud sdk""" super(UpdateTaskStatusResponse, self).__init__() self.discriminator = None def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding("utf-8") return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) def __repr__(self): """For `print`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, UpdateTaskStatusResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
28.383721
79
0.552233
import re import six from huaweicloudsdkcore.sdk_response import SdkResponse from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization class UpdateTaskStatusResponse(SdkResponse): sensitive_list = [] openapi_types = { } attribute_map = { } def __init__(self): super(UpdateTaskStatusResponse, self).__init__() self.discriminator = None def to_dict(self): result = {} for attr, _ in six.iteritems(self.openapi_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: if attr in self.sensitive_list: result[attr] = "****" else: result[attr] = value return result def to_str(self): import simplejson as json if six.PY2: import sys reload(sys) sys.setdefaultencoding("utf-8") return json.dumps(sanitize_for_serialization(self), ensure_ascii=False) def __repr__(self): return self.to_str() def __eq__(self, other): if not isinstance(other, UpdateTaskStatusResponse): return False return self.__dict__ == other.__dict__ def __ne__(self, other): return not self == other
true
true
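A short usage sketch for the response model in the record above, assuming the huaweicloudsdksms package is installed and that the module path mirrors the repository path shown; with no entries in openapi_types, serialization is essentially a no-op.

from huaweicloudsdksms.v3.model.update_task_status_response import UpdateTaskStatusResponse

resp = UpdateTaskStatusResponse()
print(resp.to_dict())                  # {} -- no attributes are declared in openapi_types
print(isinstance(resp.to_str(), str))  # True; note that to_str() imports simplejson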
f701278cdccca27a655b9dcb4c115270db374458
1,472
py
Python
freshmaker/handlers/internal/__init__.py
mulaievaRH/freshmaker
809b435d7cab1907eb74ecd898693835a92db9d8
[ "MIT" ]
5
2020-06-17T11:29:16.000Z
2022-03-24T07:20:16.000Z
freshmaker/handlers/internal/__init__.py
ronnyhlim/freshmaker
b7635dcfe631759e917c85e6ef6654024a3fb91c
[ "MIT" ]
96
2020-06-29T15:01:23.000Z
2022-03-30T08:07:06.000Z
freshmaker/handlers/internal/__init__.py
ronnyhlim/freshmaker
b7635dcfe631759e917c85e6ef6654024a3fb91c
[ "MIT" ]
20
2020-06-16T01:30:08.000Z
2022-02-19T15:34:55.000Z
# -*- coding: utf-8 -*- # Copyright (c) 2017 Red Hat, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. from .update_db_on_advisory_change import UpdateDBOnAdvisoryChange # noqa from .generate_advisory_signed_event_on_rpm_sign import GenerateAdvisorySignedEventOnRPMSign # noqa from .update_db_on_odcs_compose_fail import UpdateDBOnODCSComposeFail # noqa from .cancel_event_on_freshmaker_manage_request import CancelEventOnFreshmakerManageRequest # noqa
56.615385
100
0.799592
from .update_db_on_advisory_change import UpdateDBOnAdvisoryChange from .generate_advisory_signed_event_on_rpm_sign import GenerateAdvisorySignedEventOnRPMSign from .update_db_on_odcs_compose_fail import UpdateDBOnODCSComposeFail from .cancel_event_on_freshmaker_manage_request import CancelEventOnFreshmakerManageRequest
true
true
f701278f4a4a911c0820ec5f68a982b36d79019f
219
py
Python
ppr-api/src/utils/datetime.py
gh2os/ppr
9f67321baa5bbb450ac5e06755e2838497a2cf96
[ "Apache-2.0" ]
null
null
null
ppr-api/src/utils/datetime.py
gh2os/ppr
9f67321baa5bbb450ac5e06755e2838497a2cf96
[ "Apache-2.0" ]
2
2020-03-18T23:26:53.000Z
2020-03-18T23:40:19.000Z
ppr-api/src/utils/datetime.py
gh2os/ppr
9f67321baa5bbb450ac5e06755e2838497a2cf96
[ "Apache-2.0" ]
null
null
null
import datetime import time import pytz PACIFIC_TZ = pytz.timezone('America/Vancouver') def today_pacific(): now_pacific = datetime.datetime.fromtimestamp(time.time(), PACIFIC_TZ) return now_pacific.date()
16.846154
74
0.762557
import datetime import time import pytz PACIFIC_TZ = pytz.timezone('America/Vancouver') def today_pacific(): now_pacific = datetime.datetime.fromtimestamp(time.time(), PACIFIC_TZ) return now_pacific.date()
true
true
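The record above computes "today" in the America/Vancouver zone through pytz. A hedged sketch of the same idea using only the standard library (zoneinfo, Python 3.9+ with tz data available); this is an alternative formulation, not what the project ships.

import datetime
from zoneinfo import ZoneInfo

PACIFIC_TZ = ZoneInfo("America/Vancouver")

def today_pacific():
    # Same contract as the record's helper: the current calendar date in Pacific time.
    return datetime.datetime.now(PACIFIC_TZ).date()

print(today_pacific())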
f701283274220115b63b69d493059573a505dd5c
4,698
py
Python
pybrain/tools/functions.py
metabacchi/FuzzyClassificator
f59c10364b872edce342403db6ef26e30d7f69b8
[ "MIT" ]
48
2015-01-02T12:52:58.000Z
2022-02-21T01:05:10.000Z
pybrain/tools/functions.py
metabacchi/FuzzyClassificator
f59c10364b872edce342403db6ef26e30d7f69b8
[ "MIT" ]
null
null
null
pybrain/tools/functions.py
metabacchi/FuzzyClassificator
f59c10364b872edce342403db6ef26e30d7f69b8
[ "MIT" ]
10
2015-04-05T03:44:59.000Z
2022-03-30T02:19:10.000Z
__author__ = 'Tom Schaul, [email protected]' from scipy import array, exp, tanh, clip, log, dot, sqrt, power, pi, tan, diag, rand, real_if_close from scipy.linalg import inv, det, svd, logm, expm2 def semilinear(x): """ This function ensures that the values of the array are always positive. It is x+1 for x=>0 and exp(x) for x<0. """ try: # assume x is a numpy array shape = x.shape x.flatten() x = x.tolist() except AttributeError: # no, it wasn't: build shape from length of list shape = (1, len(x)) def f(val): if val < 0: # exponential function for x<0 return safeExp(val) else: # linear function for x>=0 return val + 1.0 return array(map(f, x)).reshape(shape) def semilinearPrime(x): """ This function is the first derivative of the semilinear function (above). It is needed for the backward pass of the module. """ try: # assume x is a numpy array shape = x.shape x.flatten() x = x.tolist() except AttributeError: # no, it wasn't: build shape from length of list shape = (1, len(x)) def f(val): if val < 0: # exponential function for x<0 return safeExp(val) else: # linear function for x>=0 return 1.0 return array(map(f, x)).reshape(shape) def safeExp(x): """ Bounded range for the exponential function (won't produce inf or NaN). """ return exp(clip(x, -500, 500)) def sigmoid(x): """ Logistic sigmoid function. """ return 1. / (1. + safeExp(-x)) def sigmoidPrime(x): """ Derivative of logistic sigmoid. """ tmp = sigmoid(x) return tmp * (1 - tmp) def tanhPrime(x): """ Derivative of tanh. """ tmp = tanh(x) return 1 - tmp * tmp def ranking(R): """ Produces a linear ranking of the values in R. """ l = sorted(list(enumerate(R)), cmp=lambda a, b: cmp(a[1], b[1])) l = sorted(list(enumerate(l)), cmp=lambda a, b: cmp(a[1], b[1])) return array(map(lambda kv: kv[0], l)) def expln(x): """ This continuous function ensures that the values of the array are always positive. It is ln(x+1)+1 for x >= 0 and exp(x) for x < 0. """ def f(val): if val < 0: # exponential function for x < 0 return exp(val) else: # natural log function for x >= 0 return log(val + 1.0) + 1 try: result = array(map(f, x)) except TypeError: result = array(f(x)) return result def explnPrime(x): """ This function is the first derivative of the expln function (above). It is needed for the backward pass of the module. """ def f(val): if val < 0: # exponential function for x<0 return exp(val) else: # linear function for x>=0 return 1.0 / (val + 1.0) try: result = array(map(f, x)) except TypeError: result = array(f(x)) return result def multivariateNormalPdf(z, x, sigma): """ The pdf of a multivariate normal distribution (not in scipy). The sample z and the mean x should be 1-dim-arrays, and sigma a square 2-dim-array. """ assert len(z.shape) == 1 and len(x.shape) == 1 and len(x) == len(z) and sigma.shape == (len(x), len(z)) tmp = -0.5 * dot(dot((z - x), inv(sigma)), (z - x)) res = (1. / power(2.0 * pi, len(z) / 2.)) * (1. / sqrt(det(sigma))) * exp(tmp) return res def simpleMultivariateNormalPdf(z, detFactorSigma): """ Assuming z has been transformed to a mean of zero and an identity matrix of covariances. Needs to provide the determinant of the factorized (real) covariance matrix. """ dim = len(z) return exp(-0.5 * dot(z, z)) / (power(2.0 * pi, dim / 2.) * detFactorSigma) def multivariateCauchy(mu, sigma, onlyDiagonal=True): """ Generates a sample according to a given multivariate Cauchy distribution. 
""" if not onlyDiagonal: u, s, d = svd(sigma) coeffs = sqrt(s) else: coeffs = diag(sigma) r = rand(len(mu)) res = coeffs * tan(pi * (r - 0.5)) if not onlyDiagonal: res = dot(d, dot(res, u)) return res + mu def approxChiFunction(dim): """ Returns Chi (expectation of the length of a normal random vector) approximation according to: Ostermeier 1997. """ dim = float(dim) return sqrt(dim) * (1 - 1 / (4 * dim) + 1 / (21 * dim ** 2)) def sqrtm(M): """ Returns the symmetric semi-definite positive square root of a matrix. """ r = real_if_close(expm2(0.5 * logm(M)), 1e-8) return (r + r.T) / 2
30.115385
107
0.578118
__author__ = 'Tom Schaul, [email protected]' from scipy import array, exp, tanh, clip, log, dot, sqrt, power, pi, tan, diag, rand, real_if_close from scipy.linalg import inv, det, svd, logm, expm2 def semilinear(x): try: shape = x.shape x.flatten() x = x.tolist() except AttributeError: shape = (1, len(x)) def f(val): if val < 0: # exponential function for x<0 return safeExp(val) else: # linear function for x>=0 return val + 1.0 return array(map(f, x)).reshape(shape) def semilinearPrime(x): try: # assume x is a numpy array shape = x.shape x.flatten() x = x.tolist() except AttributeError: # no, it wasn't: build shape from length of list shape = (1, len(x)) def f(val): if val < 0: return safeExp(val) else: return 1.0 return array(map(f, x)).reshape(shape) def safeExp(x): return exp(clip(x, -500, 500)) def sigmoid(x): return 1. / (1. + safeExp(-x)) def sigmoidPrime(x): tmp = sigmoid(x) return tmp * (1 - tmp) def tanhPrime(x): tmp = tanh(x) return 1 - tmp * tmp def ranking(R): l = sorted(list(enumerate(R)), cmp=lambda a, b: cmp(a[1], b[1])) l = sorted(list(enumerate(l)), cmp=lambda a, b: cmp(a[1], b[1])) return array(map(lambda kv: kv[0], l)) def expln(x): def f(val): if val < 0: return exp(val) else: return log(val + 1.0) + 1 try: result = array(map(f, x)) except TypeError: result = array(f(x)) return result def explnPrime(x): def f(val): if val < 0: return exp(val) else: return 1.0 / (val + 1.0) try: result = array(map(f, x)) except TypeError: result = array(f(x)) return result def multivariateNormalPdf(z, x, sigma): assert len(z.shape) == 1 and len(x.shape) == 1 and len(x) == len(z) and sigma.shape == (len(x), len(z)) tmp = -0.5 * dot(dot((z - x), inv(sigma)), (z - x)) res = (1. / power(2.0 * pi, len(z) / 2.)) * (1. / sqrt(det(sigma))) * exp(tmp) return res def simpleMultivariateNormalPdf(z, detFactorSigma): dim = len(z) return exp(-0.5 * dot(z, z)) / (power(2.0 * pi, dim / 2.) * detFactorSigma) def multivariateCauchy(mu, sigma, onlyDiagonal=True): if not onlyDiagonal: u, s, d = svd(sigma) coeffs = sqrt(s) else: coeffs = diag(sigma) r = rand(len(mu)) res = coeffs * tan(pi * (r - 0.5)) if not onlyDiagonal: res = dot(d, dot(res, u)) return res + mu def approxChiFunction(dim): dim = float(dim) return sqrt(dim) * (1 - 1 / (4 * dim) + 1 / (21 * dim ** 2)) def sqrtm(M): r = real_if_close(expm2(0.5 * logm(M)), 1e-8) return (r + r.T) / 2
true
true
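The helpers in the record above are Python 2 era code: array(map(f, x)) assumes map() returns a list, ranking() relies on the removed sorted(cmp=...) argument, and sqrtm() calls scipy.linalg.expm2, which modern SciPy no longer provides. A hedged Python 3 sketch of two of the helpers (expln and ranking), written against NumPy alone and not taken from any later PyBrain release.

import numpy as np

def expln(x):
    # ln(x + 1) + 1 for x >= 0 and exp(x) for x < 0, per the record's docstring.
    x = np.asarray(x, dtype=float)
    out = np.empty_like(x)
    neg = x < 0
    out[neg] = np.exp(x[neg])
    out[~neg] = np.log(x[~neg] + 1.0) + 1.0
    return out

def ranking(R):
    # Linear ranking of the values: the rank of each element (argsort of argsort).
    return np.argsort(np.argsort(np.asarray(R)))

print(expln([-1.0, 0.0, 2.0]))    # approximately [0.368, 1.0, 2.099]
print(ranking([0.3, 0.1, 0.2]))   # [2 0 1]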
f7012832763745f5716873aa165b3bd66774aa82
9,525
py
Python
opacus/tests/ddp_hook_check.py
RQuispeC/opacus
5c83d59fc169e93667946204f7a6859827a38ace
[ "Apache-2.0" ]
null
null
null
opacus/tests/ddp_hook_check.py
RQuispeC/opacus
5c83d59fc169e93667946204f7a6859827a38ace
[ "Apache-2.0" ]
null
null
null
opacus/tests/ddp_hook_check.py
RQuispeC/opacus
5c83d59fc169e93667946204f7a6859827a38ace
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved import os import sys import unittest import torch import torch.distributed as dist import torch.multiprocessing as mp import torch.nn as nn import torch.optim as optim from opacus import PrivacyEngine from opacus.distributed import DifferentiallyPrivateDistributedDataParallel as DPDDP from torch.nn.parallel import DistributedDataParallel as DDP PRIVACY_ALPHAS = [1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)) def setup_and_get_device(rank, world_size, nonce=0): """ Initialize the torch.distributed process group. If you run multiple groups in parallel or if you have zombie processes, you can add a nonce to avoid errors. """ device = 0 if sys.platform == "win32": # Distributed package only covers collective communications with Gloo # backend and FileStore on Windows platform. Set init_method parameter # in init_process_group to a local file. # Example init_method="file:///f:/libtmp/some_file" init_method = "file:///{your local file path}" # initialize the process group dist.init_process_group( "gloo", init_method=init_method, rank=rank, world_size=world_size ) device = rank elif os.environ.get("SLURM_NTASKS") is not None: # Running on a Slurm cluster os.environ["MASTER_ADDR"] = "127.0.0.1" os.environ["MASTER_PORT"] = str(7440 + nonce) local_rank = int(os.environ.get("SLURM_LOCALID")) dist.init_process_group(backend="gloo", rank=rank, world_size=world_size) # The device is the local rank (if you have 2 nodes with 8 GPUs each, you will have two "cuda:0" devices) device = local_rank else: os.environ["MASTER_ADDR"] = "localhost" os.environ["MASTER_PORT"] = "12355" os.environ["RANK"] = str(rank) os.environ["WORLD_SIZE"] = str(world_size) dist.init_process_group( init_method="env://", backend="nccl", ) # Single node experiment device = rank return device def cleanup(): dist.destroy_process_group() class ToyModel(nn.Module): def __init__(self): super(ToyModel, self).__init__() self.net1 = nn.Linear(10, 10) self.relu = nn.ReLU() self.net2 = nn.Linear(10, 5) def forward(self, x): return self.net2(self.relu(self.net1(x))) def demo_basic(rank, world_size, weight, dp, noise_multiplier=0, max_grad_norm=1e8): # We don't want the 2 GPUs to work on the same examples/labels in parallel torch.manual_seed(rank) batch_size = 32 withdp = "with" + ("out " if not dp else "") print(f"Running basic DDP {withdp} differential privacy example on rank {rank}.") device = setup_and_get_device(rank, world_size) # create model and move it to GPU with id rank model = ToyModel().to(device) print(f"Initial weight: {model.net1.weight.data}") # Freeze all the parameters except one, to ensure that the noise is the same # (the DDP hook does not browse the layers in the same order as the naive implementation) model.net1.bias.requires_grad = False model.net2.bias.requires_grad = False model.net2.weight.requires_grad = False if dp: ddp_model = DPDDP(model) engine = PrivacyEngine( ddp_model, batch_size=batch_size, sample_size=10 * batch_size, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, max_grad_norm=[max_grad_norm], ) engine.random_number_generator = engine._set_seed(0) else: ddp_model = DDP(model, device_ids=[device]) loss_fn = nn.MSELoss() optimizer = optim.SGD(ddp_model.parameters(), lr=1) if dp: engine.attach(optimizer) optimizer.zero_grad() labels = torch.randn(batch_size, 5).to(device) outputs = ddp_model(torch.randn(batch_size, 10).to(device)) loss_fn(outputs, labels).backward() optimizer.step() 
weight.copy_(model.net1.weight.data.cpu()) cleanup() def demo_ddp_hook(rank, world_size, weight, dp, noise_multiplier, max_grad_norm): torch.manual_seed(rank) batch_size = 32 withdp = "with" + ("out " if not dp else "") print(f"Running DDP hook {withdp} differential privacy example on rank {rank}.") device = setup_and_get_device(rank, world_size, nonce=1) # create model and move it to GPU with id rank model = ToyModel().to(device) model.net1.bias.requires_grad = False model.net2.bias.requires_grad = False model.net2.weight.requires_grad = False ddp_model = DDP(model, device_ids=[device]) if dp: engine = PrivacyEngine( ddp_model, batch_size=batch_size, sample_size=10 * batch_size, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, max_grad_norm=[max_grad_norm], ) engine.random_number_generator = engine._set_seed(0) loss_fn = nn.MSELoss() optimizer = optim.SGD(ddp_model.parameters(), lr=1) if dp: engine.attach(optimizer) optimizer.zero_grad() labels = torch.randn(batch_size, 5).to(device) outputs = ddp_model(torch.randn(batch_size, 10).to(device)) loss_fn(outputs, labels).backward() optimizer.step() weight.copy_(model.net1.weight.data.cpu()) del ddp_model cleanup() def add_remove_ddp_hooks( rank, world_size, remaining_hooks, dp, noise_multiplier=0, max_grad_norm=1e8 ): device = setup_and_get_device(rank, world_size, nonce=2) model = ToyModel().to(device) ddp_model = nn.parallel.DistributedDataParallel(model, device_ids=[device]) engine = PrivacyEngine( ddp_model, batch_size=1, sample_size=10, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, max_grad_norm=[max_grad_norm], ) optimizer = optim.SGD(ddp_model.parameters(), lr=1) engine.attach(optimizer) remaining_hooks["attached"] = { p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks } engine.detach() remaining_hooks["detached"] = { p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks } cleanup() def debug(rank, world_size, tensor, dp, noise_multiplier=0, max_grad_norm=1e8): local_rank = setup_and_get_device(rank, world_size) print(f"Rank: {rank},World size: {world_size}, local_rank: {local_rank}") tensor = tensor.to(local_rank) print(f"dp: {dp}") print(tensor) cleanup() def run_function(local_function, tensor, dp, noise_multiplier=0, max_grad_norm=1e8): if os.environ.get("SLURM_NTASKS") is not None: world_size = int(os.environ.get("SLURM_NTASKS")) rank = int(os.environ.get("SLURM_PROCID")) print(f"Running on a Slurm cluster with {world_size} tasks.") local_function(rank, world_size, tensor, dp, noise_multiplier, max_grad_norm) else: world_size = torch.cuda.device_count() print(f"Spawning multiple processes on a local machine with {world_size} GPUs") # The rank will be passed as the first argument mp.spawn( local_function, args=( world_size, tensor, dp, noise_multiplier, max_grad_norm, ), nprocs=world_size, join=True, ) return world_size class GradientComputationTest(unittest.TestCase): def test_connection(self): tensor = torch.zeros(10, 10) world_size = run_function(debug, tensor, dp=True) self.assertTrue( world_size >= 2, f"Need at least 2 gpus but was provided only {world_size}." 
) def test_gradient_noclip_zeronoise(self): # Tests that gradient is the same with DP or with DDP weight_dp, weight_nodp = torch.zeros(10, 10), torch.zeros(10, 10) run_function(demo_basic, weight_dp, dp=True) run_function(demo_basic, weight_nodp, dp=False) self.assertTrue(torch.norm(weight_dp - weight_nodp) < 1e-7) def test_ddp_hook(self): # Tests that the DDP hook does the same thing as naive aggregation with per layer clipping weight_ddp_naive, weight_ddp_hook = torch.zeros(10, 10), torch.zeros(10, 10) run_function( demo_basic, weight_ddp_naive, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) run_function( demo_ddp_hook, weight_ddp_hook, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) self.assertTrue( torch.norm(weight_ddp_naive - weight_ddp_hook) < 1e-7, f"DDP naive: {weight_ddp_naive}\nDDP hook: {weight_ddp_hook}", ) def test_add_remove_ddp_hooks(self): remaining_hooks = { "attached": None, "detached": None, } run_function( add_remove_ddp_hooks, remaining_hooks, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) assert remaining_hooks["attached"], "There are no hooks." assert not remaining_hooks[ "detached" ], f"Some hooks remain after .remove_hooks(): {remaining_hooks}"
31.026059
113
0.64357
import os import sys import unittest import torch import torch.distributed as dist import torch.multiprocessing as mp import torch.nn as nn import torch.optim as optim from opacus import PrivacyEngine from opacus.distributed import DifferentiallyPrivateDistributedDataParallel as DPDDP from torch.nn.parallel import DistributedDataParallel as DDP PRIVACY_ALPHAS = [1 + x / 10.0 for x in range(1, 100)] + list(range(12, 64)) def setup_and_get_device(rank, world_size, nonce=0): device = 0 if sys.platform == "win32": init_method = "file:///{your local file path}" dist.init_process_group( "gloo", init_method=init_method, rank=rank, world_size=world_size ) device = rank elif os.environ.get("SLURM_NTASKS") is not None: os.environ["MASTER_ADDR"] = "127.0.0.1" os.environ["MASTER_PORT"] = str(7440 + nonce) local_rank = int(os.environ.get("SLURM_LOCALID")) dist.init_process_group(backend="gloo", rank=rank, world_size=world_size) device = local_rank else: os.environ["MASTER_ADDR"] = "localhost" os.environ["MASTER_PORT"] = "12355" os.environ["RANK"] = str(rank) os.environ["WORLD_SIZE"] = str(world_size) dist.init_process_group( init_method="env://", backend="nccl", ) device = rank return device def cleanup(): dist.destroy_process_group() class ToyModel(nn.Module): def __init__(self): super(ToyModel, self).__init__() self.net1 = nn.Linear(10, 10) self.relu = nn.ReLU() self.net2 = nn.Linear(10, 5) def forward(self, x): return self.net2(self.relu(self.net1(x))) def demo_basic(rank, world_size, weight, dp, noise_multiplier=0, max_grad_norm=1e8): torch.manual_seed(rank) batch_size = 32 withdp = "with" + ("out " if not dp else "") print(f"Running basic DDP {withdp} differential privacy example on rank {rank}.") device = setup_and_get_device(rank, world_size) # create model and move it to GPU with id rank model = ToyModel().to(device) print(f"Initial weight: {model.net1.weight.data}") # Freeze all the parameters except one, to ensure that the noise is the same # (the DDP hook does not browse the layers in the same order as the naive implementation) model.net1.bias.requires_grad = False model.net2.bias.requires_grad = False model.net2.weight.requires_grad = False if dp: ddp_model = DPDDP(model) engine = PrivacyEngine( ddp_model, batch_size=batch_size, sample_size=10 * batch_size, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, max_grad_norm=[max_grad_norm], ) engine.random_number_generator = engine._set_seed(0) else: ddp_model = DDP(model, device_ids=[device]) loss_fn = nn.MSELoss() optimizer = optim.SGD(ddp_model.parameters(), lr=1) if dp: engine.attach(optimizer) optimizer.zero_grad() labels = torch.randn(batch_size, 5).to(device) outputs = ddp_model(torch.randn(batch_size, 10).to(device)) loss_fn(outputs, labels).backward() optimizer.step() weight.copy_(model.net1.weight.data.cpu()) cleanup() def demo_ddp_hook(rank, world_size, weight, dp, noise_multiplier, max_grad_norm): torch.manual_seed(rank) batch_size = 32 withdp = "with" + ("out " if not dp else "") print(f"Running DDP hook {withdp} differential privacy example on rank {rank}.") device = setup_and_get_device(rank, world_size, nonce=1) # create model and move it to GPU with id rank model = ToyModel().to(device) model.net1.bias.requires_grad = False model.net2.bias.requires_grad = False model.net2.weight.requires_grad = False ddp_model = DDP(model, device_ids=[device]) if dp: engine = PrivacyEngine( ddp_model, batch_size=batch_size, sample_size=10 * batch_size, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, 
max_grad_norm=[max_grad_norm], ) engine.random_number_generator = engine._set_seed(0) loss_fn = nn.MSELoss() optimizer = optim.SGD(ddp_model.parameters(), lr=1) if dp: engine.attach(optimizer) optimizer.zero_grad() labels = torch.randn(batch_size, 5).to(device) outputs = ddp_model(torch.randn(batch_size, 10).to(device)) loss_fn(outputs, labels).backward() optimizer.step() weight.copy_(model.net1.weight.data.cpu()) del ddp_model cleanup() def add_remove_ddp_hooks( rank, world_size, remaining_hooks, dp, noise_multiplier=0, max_grad_norm=1e8 ): device = setup_and_get_device(rank, world_size, nonce=2) model = ToyModel().to(device) ddp_model = nn.parallel.DistributedDataParallel(model, device_ids=[device]) engine = PrivacyEngine( ddp_model, batch_size=1, sample_size=10, alphas=PRIVACY_ALPHAS, noise_multiplier=noise_multiplier, max_grad_norm=[max_grad_norm], ) optimizer = optim.SGD(ddp_model.parameters(), lr=1) engine.attach(optimizer) remaining_hooks["attached"] = { p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks } engine.detach() remaining_hooks["detached"] = { p: p._backward_hooks for p in engine.module.parameters() if p._backward_hooks } cleanup() def debug(rank, world_size, tensor, dp, noise_multiplier=0, max_grad_norm=1e8): local_rank = setup_and_get_device(rank, world_size) print(f"Rank: {rank},World size: {world_size}, local_rank: {local_rank}") tensor = tensor.to(local_rank) print(f"dp: {dp}") print(tensor) cleanup() def run_function(local_function, tensor, dp, noise_multiplier=0, max_grad_norm=1e8): if os.environ.get("SLURM_NTASKS") is not None: world_size = int(os.environ.get("SLURM_NTASKS")) rank = int(os.environ.get("SLURM_PROCID")) print(f"Running on a Slurm cluster with {world_size} tasks.") local_function(rank, world_size, tensor, dp, noise_multiplier, max_grad_norm) else: world_size = torch.cuda.device_count() print(f"Spawning multiple processes on a local machine with {world_size} GPUs") # The rank will be passed as the first argument mp.spawn( local_function, args=( world_size, tensor, dp, noise_multiplier, max_grad_norm, ), nprocs=world_size, join=True, ) return world_size class GradientComputationTest(unittest.TestCase): def test_connection(self): tensor = torch.zeros(10, 10) world_size = run_function(debug, tensor, dp=True) self.assertTrue( world_size >= 2, f"Need at least 2 gpus but was provided only {world_size}." ) def test_gradient_noclip_zeronoise(self): # Tests that gradient is the same with DP or with DDP weight_dp, weight_nodp = torch.zeros(10, 10), torch.zeros(10, 10) run_function(demo_basic, weight_dp, dp=True) run_function(demo_basic, weight_nodp, dp=False) self.assertTrue(torch.norm(weight_dp - weight_nodp) < 1e-7) def test_ddp_hook(self): # Tests that the DDP hook does the same thing as naive aggregation with per layer clipping weight_ddp_naive, weight_ddp_hook = torch.zeros(10, 10), torch.zeros(10, 10) run_function( demo_basic, weight_ddp_naive, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) run_function( demo_ddp_hook, weight_ddp_hook, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) self.assertTrue( torch.norm(weight_ddp_naive - weight_ddp_hook) < 1e-7, f"DDP naive: {weight_ddp_naive}\nDDP hook: {weight_ddp_hook}", ) def test_add_remove_ddp_hooks(self): remaining_hooks = { "attached": None, "detached": None, } run_function( add_remove_ddp_hooks, remaining_hooks, dp=True, noise_multiplier=0.1, max_grad_norm=1.0, ) assert remaining_hooks["attached"], "There are no hooks." 
assert not remaining_hooks[ "detached" ], f"Some hooks remain after .remove_hooks(): {remaining_hooks}"
true
true
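run_function() in the record above dispatches either to an existing Slurm task or to torch.multiprocessing.spawn. A stripped-down sketch of just the spawn pattern with the differential-privacy pieces removed, assuming PyTorch is installed; the worker name and payload are illustrative, and spawn injects the process rank as the first argument.

import torch.multiprocessing as mp

def worker(rank, world_size, payload):
    # Each spawned process receives its rank first, followed by the args tuple.
    print(f"rank {rank} of {world_size}: {payload}")

if __name__ == "__main__":
    mp.spawn(worker, args=(2, "hello"), nprocs=2, join=True)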
f70128589f0532bb05af79995e8f8a3a096cb6b4
2,081
py
Python
motion_sensor.py
arkochatterjee/motion-sensor
69366d5f5ad33ce2a8e26dedb57074a1f40ca603
[ "MIT" ]
1
2020-05-14T07:16:01.000Z
2020-05-14T07:16:01.000Z
motion_sensor.py
arkochatterjee/motion-sensor
69366d5f5ad33ce2a8e26dedb57074a1f40ca603
[ "MIT" ]
null
null
null
motion_sensor.py
arkochatterjee/motion-sensor
69366d5f5ad33ce2a8e26dedb57074a1f40ca603
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Sat Dec 16 19:55:29 2017 @author: Arko Chatterjee """ import datetime import imutils import time import cv2 camera = cv2.VideoCapture(0) time.sleep(0.50) print("Hello!") firstFrame = None # loop over the frames of the video while True: # grab the current frame and initialize the occupied/unoccupied text (grabbed, frame) = camera.read() text = "Unossssccupied" if not grabbed: break # resize the frame, convert it to grayscale, and blur it frame = imutils.resize(frame, width=500) gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) gray = cv2.GaussianBlur(gray, (21, 21), 0) if firstFrame is None: firstFrame = gray continue # compute the absolute difference between the current frame and first frame frameDelta = cv2.absdiff(firstFrame, gray) thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1] thresh = cv2.dilate(thresh, None, iterations=2) (_, cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE) # loop over the contours for c in cnts: # if the contour is too small, ignore it if cv2.contourArea(c) < 500: continue # compute the bounding box for the contour, draw it on the frame, # and update the text (x, y, w, h) = cv2.boundingRect(c) cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2) text = "Occupied" # draw the text and timestamp on the frame cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1) # show the frame and record if the user presses a key cv2.imshow("Security Feed", frame) #cv2.imshow("Thresh", thresh) #cv2.imshow("Frame Delta", frameDelta) key = cv2.waitKey(1) & 0xFF # if the `q` key is pressed, break from the lop if key == ord("q"): break camera.release() cv2.destroyAllWindows()
25.378049
91
0.658818
import datetime import imutils import time import cv2 camera = cv2.VideoCapture(0) time.sleep(0.50) print("Hello!") firstFrame = None while True: (grabbed, frame) = camera.read() text = "Unossssccupied" if not grabbed: break frame = imutils.resize(frame, width=500) gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) gray = cv2.GaussianBlur(gray, (21, 21), 0) if firstFrame is None: firstFrame = gray continue frameDelta = cv2.absdiff(firstFrame, gray) thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1] thresh = cv2.dilate(thresh, None, iterations=2) (_, cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE) for c in cnts: if cv2.contourArea(c) < 500: continue (x, y, w, h) = cv2.boundingRect(c) cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2) text = "Occupied" cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1) cv2.imshow("Security Feed", frame) key = cv2.waitKey(1) & 0xFF if key == ord("q"): break camera.release() cv2.destroyAllWindows()
true
true
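The record above unpacks cv2.findContours() into three values, which matches OpenCV 3.x only; OpenCV 2.x and 4.x return a (contours, hierarchy) pair. A version-agnostic sketch, using a synthetic binary image in place of the script's webcam frame.

import cv2
import numpy as np

thresh = np.zeros((100, 100), dtype=np.uint8)
cv2.rectangle(thresh, (20, 20), (60, 60), 255, -1)      # one filled blob

result = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cnts = result[0] if len(result) == 2 else result[1]     # 2-tuple (2.x/4.x) or 3-tuple (3.x)
print(len(cnts), cv2.contourArea(cnts[0]))              # 1 contour with nonzero area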
f70128e8a84d420b105cc3b9c456a7da52f802bb
1,507
py
Python
dev/Gems/CloudGemFramework/v1/AWS/common-code/ServiceClient_Python/test/test_unit_service_client.py
brianherrera/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
[ "AML" ]
1,738
2017-09-21T10:59:12.000Z
2022-03-31T21:05:46.000Z
dev/Gems/CloudGemFramework/v1/AWS/common-code/ServiceClient_Python/test/test_unit_service_client.py
olivier-be/lumberyard
3d688932f919dbf5821f0cb8a210ce24abe39e9e
[ "AML" ]
427
2017-09-29T22:54:36.000Z
2022-02-15T19:26:50.000Z
dev/Gems/CloudGemFramework/v1/AWS/common-code/ServiceClient_Python/test/test_unit_service_client.py
olivier-be/lumberyard
3d688932f919dbf5821f0cb8a210ce24abe39e9e
[ "AML" ]
671
2017-09-21T08:04:01.000Z
2022-03-29T14:30:07.000Z
# # All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or # its licensors. # # For complete copyright and license terms please see the LICENSE at the root of this # distribution (the "License"). All use of this software is governed by the License, # or, if provided, by the license below or the license accompanying this file. Do not # remove or modify any license notices. This file is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # # $Revision: #4 $ import unittest from unittest import mock import cgf_service_client class UnitTest_CloudGemFramework_ServiceClient_service_client(unittest.TestCase): def test_service_client_imports(self): self.assertIsNotNone(cgf_service_client.Data) self.assertIsNotNone(cgf_service_client.Path) self.assertIsNotNone(cgf_service_client.HttpError) self.assertIsNotNone(cgf_service_client.ClientError) self.assertIsNotNone(cgf_service_client.NotFoundError) self.assertIsNotNone(cgf_service_client.NotAllowedError) self.assertIsNotNone(cgf_service_client.ServerError) @mock.patch('cgf_service_client.Path') def test_for_url(self, mock_Path): client = cgf_service_client.for_url('http://example.com', A = 10, B = 20) self.assertIs(client, mock_Path.return_value) mock_Path.assert_called_once_with('http://example.com', A = 10, B = 20) if __name__ == '__main__': unittest.main()
36.756098
85
0.752488
import unittest from unittest import mock import cgf_service_client class UnitTest_CloudGemFramework_ServiceClient_service_client(unittest.TestCase): def test_service_client_imports(self): self.assertIsNotNone(cgf_service_client.Data) self.assertIsNotNone(cgf_service_client.Path) self.assertIsNotNone(cgf_service_client.HttpError) self.assertIsNotNone(cgf_service_client.ClientError) self.assertIsNotNone(cgf_service_client.NotFoundError) self.assertIsNotNone(cgf_service_client.NotAllowedError) self.assertIsNotNone(cgf_service_client.ServerError) @mock.patch('cgf_service_client.Path') def test_for_url(self, mock_Path): client = cgf_service_client.for_url('http://example.com', A = 10, B = 20) self.assertIs(client, mock_Path.return_value) mock_Path.assert_called_once_with('http://example.com', A = 10, B = 20) if __name__ == '__main__': unittest.main()
true
true
f70128eaa726355d32230de7fc3c58190f5ad3f0
163
py
Python
aspc/wsgi.py
DDKZ/mainsite
425a8e147a17b6bdd1af420460a9ea83462252fc
[ "MIT" ]
8
2015-09-27T07:57:32.000Z
2018-10-28T06:08:40.000Z
aspc/wsgi.py
DDKZ/mainsite
425a8e147a17b6bdd1af420460a9ea83462252fc
[ "MIT" ]
132
2015-01-17T01:22:09.000Z
2018-11-13T22:05:32.000Z
aspc/wsgi.py
DDKZ/mainsite
425a8e147a17b6bdd1af420460a9ea83462252fc
[ "MIT" ]
20
2015-01-16T04:32:30.000Z
2018-09-03T22:55:05.000Z
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aspc.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
27.166667
64
0.834356
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "aspc.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
true
true
f70129a6d927002f9ee9bd1305429827f5cd6c84
1,712
py
Python
mnist/app/app/mnist.py
scpepper69/ml
13ad41dd7b22d3fa152cf3665fc4dc7c1c747917
[ "MIT" ]
null
null
null
mnist/app/app/mnist.py
scpepper69/ml
13ad41dd7b22d3fa152cf3665fc4dc7c1c747917
[ "MIT" ]
null
null
null
mnist/app/app/mnist.py
scpepper69/ml
13ad41dd7b22d3fa152cf3665fc4dc7c1c747917
[ "MIT" ]
null
null
null
from datetime import datetime import cv2 import re import base64 from flask import Flask, render_template, request, jsonify from flask_cors import CORS import numpy as np from io import BytesIO from PIL import Image, ImageOps import os,sys import requests from graphpipe import remote from matplotlib import pylab as plt app = Flask(__name__) CORS(app) # To Post by Ajax @app.route('/', methods=['GET', 'POST']) def index(): if request.method == 'POST': ans,t1,t2,t3 = get_answer(request) return jsonify({'ans': ans, 't1': t1, 't2': t2, 't3': t3}) else: return render_template('index.html') def result(img): img = img.reshape(1, 784) img = img.astype(np.float32) img = np.multiply(img, 1.0 / 255.0) pred = remote.execute("http://localhost:9001", img) r = np.argmax(pred, axis=1) pp = pred*100 top1 = str(np.argsort(-pp)[0][0])+ " (" +str(int(np.sort(-pp)[0][0]*-1))+"%)" top2 = str(np.argsort(-pp)[0][1])+ " (" +str(int(np.sort(-pp)[0][1]*-1))+"%)" top3 = str(np.argsort(-pp)[0][2])+ " (" +str(int(np.sort(-pp)[0][2]*-1))+"%)" # return int(r) return r,top1,top2,top3 def get_answer(req): img_str = re.search(r'base64,(.*)', req.form['img']).group(1) nparr = np.fromstring(base64.b64decode(img_str), np.uint8) img_src = cv2.imdecode(nparr, cv2.IMREAD_COLOR) img_negaposi = 255 - img_src img_gray = cv2.cvtColor(img_negaposi, cv2.COLOR_BGR2GRAY) img_resize = cv2.resize(img_gray,(28,28)) cv2.imwrite(f"images/{datetime.now().strftime('%s')}.jpg",img_resize) ans,t1,t2,t3 = result(img_resize) return int(ans),t1,t2,t3 if __name__ == "__main__": app.run(debug=False, host='0.0.0.0', port=8001)
31.703704
81
0.643107
from datetime import datetime import cv2 import re import base64 from flask import Flask, render_template, request, jsonify from flask_cors import CORS import numpy as np from io import BytesIO from PIL import Image, ImageOps import os,sys import requests from graphpipe import remote from matplotlib import pylab as plt app = Flask(__name__) CORS(app) @app.route('/', methods=['GET', 'POST']) def index(): if request.method == 'POST': ans,t1,t2,t3 = get_answer(request) return jsonify({'ans': ans, 't1': t1, 't2': t2, 't3': t3}) else: return render_template('index.html') def result(img): img = img.reshape(1, 784) img = img.astype(np.float32) img = np.multiply(img, 1.0 / 255.0) pred = remote.execute("http://localhost:9001", img) r = np.argmax(pred, axis=1) pp = pred*100 top1 = str(np.argsort(-pp)[0][0])+ " (" +str(int(np.sort(-pp)[0][0]*-1))+"%)" top2 = str(np.argsort(-pp)[0][1])+ " (" +str(int(np.sort(-pp)[0][1]*-1))+"%)" top3 = str(np.argsort(-pp)[0][2])+ " (" +str(int(np.sort(-pp)[0][2]*-1))+"%)" return r,top1,top2,top3 def get_answer(req): img_str = re.search(r'base64,(.*)', req.form['img']).group(1) nparr = np.fromstring(base64.b64decode(img_str), np.uint8) img_src = cv2.imdecode(nparr, cv2.IMREAD_COLOR) img_negaposi = 255 - img_src img_gray = cv2.cvtColor(img_negaposi, cv2.COLOR_BGR2GRAY) img_resize = cv2.resize(img_gray,(28,28)) cv2.imwrite(f"images/{datetime.now().strftime('%s')}.jpg",img_resize) ans,t1,t2,t3 = result(img_resize) return int(ans),t1,t2,t3 if __name__ == "__main__": app.run(debug=False, host='0.0.0.0', port=8001)
true
true
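get_answer() in the record above decodes the POSTed image with np.fromstring, which is deprecated for binary input; np.frombuffer is the drop-in replacement. A small sketch on synthetic bytes rather than a real request payload.

import base64
import numpy as np

img_str = base64.b64encode(bytes(range(16))).decode()       # stand-in for the form field
nparr = np.frombuffer(base64.b64decode(img_str), np.uint8)  # replaces np.fromstring
print(nparr.shape, nparr.dtype)                             # (16,) uint8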
f7012b0cf43442a9c11a24292ca2dd8a8e267c50
3,203
py
Python
buildtrigger/test/test_githubhandler.py
anwarchk/quay
23c5120790c619174e7d36784ca5aab7f4eece5c
[ "Apache-2.0" ]
1
2021-05-30T01:54:21.000Z
2021-05-30T01:54:21.000Z
buildtrigger/test/test_githubhandler.py
anwarchk/quay
23c5120790c619174e7d36784ca5aab7f4eece5c
[ "Apache-2.0" ]
20
2019-12-26T17:32:34.000Z
2022-03-21T22:18:06.000Z
buildtrigger/test/test_githubhandler.py
anwarchk/quay
23c5120790c619174e7d36784ca5aab7f4eece5c
[ "Apache-2.0" ]
1
2020-05-31T16:28:40.000Z
2020-05-31T16:28:40.000Z
import json import pytest from buildtrigger.test.githubmock import get_github_trigger from buildtrigger.triggerutil import (SkipRequestException, ValidationRequestException, InvalidPayloadException) from endpoints.building import PreparedBuild from util.morecollections import AttrDict @pytest.fixture def github_trigger(): return get_github_trigger() @pytest.mark.parametrize('payload, expected_error, expected_message', [ ('{"zen": true}', SkipRequestException, ""), ('{}', InvalidPayloadException, "Missing 'repository' on request"), ('{"repository": "foo"}', InvalidPayloadException, "Missing 'owner' on repository"), # Valid payload: ('''{ "repository": { "owner": { "name": "someguy" }, "name": "somerepo", "ssh_url": "someurl" }, "ref": "refs/tags/foo", "head_commit": { "id": "11d6fbc", "url": "http://some/url", "message": "some message", "timestamp": "NOW" } }''', None, None), # Skip message: ('''{ "repository": { "owner": { "name": "someguy" }, "name": "somerepo", "ssh_url": "someurl" }, "ref": "refs/tags/foo", "head_commit": { "id": "11d6fbc", "url": "http://some/url", "message": "[skip build]", "timestamp": "NOW" } }''', SkipRequestException, ''), ]) def test_handle_trigger_request(github_trigger, payload, expected_error, expected_message): def get_payload(): return json.loads(payload) request = AttrDict(dict(get_json=get_payload)) if expected_error is not None: with pytest.raises(expected_error) as ipe: github_trigger.handle_trigger_request(request) assert str(ipe.value) == expected_message else: assert isinstance(github_trigger.handle_trigger_request(request), PreparedBuild) @pytest.mark.parametrize('dockerfile_path, contents', [ ('/Dockerfile', 'hello world'), ('somesubdir/Dockerfile', 'hi universe'), ('unknownpath', None), ]) def test_load_dockerfile_contents(dockerfile_path, contents): trigger = get_github_trigger(dockerfile_path) assert trigger.load_dockerfile_contents() == contents @pytest.mark.parametrize('username, expected_response', [ ('unknownuser', None), ('knownuser', {'html_url': 'https://bitbucket.org/knownuser', 'avatar_url': 'avatarurl'}), ]) def test_lookup_user(username, expected_response, github_trigger): assert github_trigger.lookup_user(username) == expected_response def test_list_build_subdirs(github_trigger): assert github_trigger.list_build_subdirs() == ['Dockerfile', 'somesubdir/Dockerfile'] def test_list_build_source_namespaces(github_trigger): namespaces_expected = [ { 'personal': True, 'score': 1, 'avatar_url': 'avatarurl', 'id': 'knownuser', 'title': 'knownuser', 'url': 'https://bitbucket.org/knownuser', }, { 'score': 0, 'title': 'someorg', 'personal': False, 'url': '', 'avatar_url': 'avatarurl', 'id': 'someorg' } ] found = github_trigger.list_build_source_namespaces() found.sort() namespaces_expected.sort() assert found == namespaces_expected
27.144068
92
0.656884
import json import pytest from buildtrigger.test.githubmock import get_github_trigger from buildtrigger.triggerutil import (SkipRequestException, ValidationRequestException, InvalidPayloadException) from endpoints.building import PreparedBuild from util.morecollections import AttrDict @pytest.fixture def github_trigger(): return get_github_trigger() @pytest.mark.parametrize('payload, expected_error, expected_message', [ ('{"zen": true}', SkipRequestException, ""), ('{}', InvalidPayloadException, "Missing 'repository' on request"), ('{"repository": "foo"}', InvalidPayloadException, "Missing 'owner' on repository"), ('''{ "repository": { "owner": { "name": "someguy" }, "name": "somerepo", "ssh_url": "someurl" }, "ref": "refs/tags/foo", "head_commit": { "id": "11d6fbc", "url": "http://some/url", "message": "some message", "timestamp": "NOW" } }''', None, None), ('''{ "repository": { "owner": { "name": "someguy" }, "name": "somerepo", "ssh_url": "someurl" }, "ref": "refs/tags/foo", "head_commit": { "id": "11d6fbc", "url": "http://some/url", "message": "[skip build]", "timestamp": "NOW" } }''', SkipRequestException, ''), ]) def test_handle_trigger_request(github_trigger, payload, expected_error, expected_message): def get_payload(): return json.loads(payload) request = AttrDict(dict(get_json=get_payload)) if expected_error is not None: with pytest.raises(expected_error) as ipe: github_trigger.handle_trigger_request(request) assert str(ipe.value) == expected_message else: assert isinstance(github_trigger.handle_trigger_request(request), PreparedBuild) @pytest.mark.parametrize('dockerfile_path, contents', [ ('/Dockerfile', 'hello world'), ('somesubdir/Dockerfile', 'hi universe'), ('unknownpath', None), ]) def test_load_dockerfile_contents(dockerfile_path, contents): trigger = get_github_trigger(dockerfile_path) assert trigger.load_dockerfile_contents() == contents @pytest.mark.parametrize('username, expected_response', [ ('unknownuser', None), ('knownuser', {'html_url': 'https://bitbucket.org/knownuser', 'avatar_url': 'avatarurl'}), ]) def test_lookup_user(username, expected_response, github_trigger): assert github_trigger.lookup_user(username) == expected_response def test_list_build_subdirs(github_trigger): assert github_trigger.list_build_subdirs() == ['Dockerfile', 'somesubdir/Dockerfile'] def test_list_build_source_namespaces(github_trigger): namespaces_expected = [ { 'personal': True, 'score': 1, 'avatar_url': 'avatarurl', 'id': 'knownuser', 'title': 'knownuser', 'url': 'https://bitbucket.org/knownuser', }, { 'score': 0, 'title': 'someorg', 'personal': False, 'url': '', 'avatar_url': 'avatarurl', 'id': 'someorg' } ] found = github_trigger.list_build_source_namespaces() found.sort() namespaces_expected.sort() assert found == namespaces_expected
true
true
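The tests in the record above stub a Flask-style request with AttrDict(dict(get_json=get_payload)) so the trigger can call request.get_json(). A dependency-free sketch of the same stubbing idea using types.SimpleNamespace; the handler here is a stand-in, not Quay code.

import json
from types import SimpleNamespace

def handle(request):
    payload = request.get_json()
    return payload.get("zen", False)

request = SimpleNamespace(get_json=lambda: json.loads('{"zen": true}'))
print(handle(request))   # True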
f7012b92e6dafbeed00ef8bafd0ecc45538ef7bf
2,706
py
Python
instruments/Opus/QOpus.py
davidgrier/QInstrument
339b4c855017db2c10c7f283c48e5fa0d50cc286
[ "BSD-3-Clause" ]
null
null
null
instruments/Opus/QOpus.py
davidgrier/QInstrument
339b4c855017db2c10c7f283c48e5fa0d50cc286
[ "BSD-3-Clause" ]
null
null
null
instruments/Opus/QOpus.py
davidgrier/QInstrument
339b4c855017db2c10c7f283c48e5fa0d50cc286
[ "BSD-3-Clause" ]
null
null
null
from QInstrument.lib import QInstrumentInterface from QInstrument.instruments.Opus.Opus import Opus from PyQt5.QtCore import (pyqtSlot, QTimer) import logging logging.basicConfig() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) class QOpusWidget(QInstrumentInterface): def __init__(self, *args, interval=None, **kwargs): super().__init__(*args, uiFile='OpusWidget.ui', deviceClass=Opus, **kwargs) self.interval = interval or 200 self.timer = QTimer() self.connectSignals() self.startPolling() def connectSignals(self): self.timer.timeout.connect(self.poll) self.ui.PowerDial.valueChanged.connect(self.updatePower) self.ui.Power.editingFinished.connect(self.updatePowerDial) self.ui.PowerDial.valueChanged.connect(self.uncheck) self.ui.SendPower.clicked.connect(self.check) self.device.dataReady.connect(self.updateValues) self.ui.Disable.clicked.connect(self.disable) def startPolling(self): if self.isEnabled(): self.timer.start(self.interval) return self def stopPolling(self): self.timer.stop() @pyqtSlot() def poll(self): self.device.send('POWER?') self.device.send('CURRENT?') self.device.send('STATUS?') @pyqtSlot(int) def updatePower(self, value): self.ui.Power.setValue(value) @pyqtSlot(str) def updateValues(self, data): if 'mW' in data: numeric_filter = filter(str.isdigit, data) p = float((int("".join(numeric_filter))/10)) if p == 0.0: self.ui.EnableSwitch.setChecked(False) if p != 0.0: self.ui.EnableSwitch.setChecked(True) self.ui.ActualPower.setValue(p) if '%' in data: numeric_filter = filter(str.isdigit, data) p = float((int("".join(numeric_filter))/10)) self.ui.CurrentBox.setValue(p) @pyqtSlot() def check(self): self.ui.sentCheck.setChecked(True) a = self.ui.Power.value() self.device.set_power(a) @pyqtSlot() def uncheck(self): self.ui.sentCheck.setChecked(False) @pyqtSlot() def updatePowerDial(self): value = self.ui.Power.value() self.ui.PowerDial.setValue(int(value)) def disable(self): self.device.send('OFF') def main(): import sys from PyQt5.QtWidgets import QApplication app = QApplication(sys.argv) widget = QOpusWidget() widget.show() sys.exit(app.exec_()) if __name__ == '__main__': main()
27.896907
67
0.613452
from QInstrument.lib import QInstrumentInterface from QInstrument.instruments.Opus.Opus import Opus from PyQt5.QtCore import (pyqtSlot, QTimer) import logging logging.basicConfig() logger = logging.getLogger(__name__) logger.setLevel(logging.DEBUG) class QOpusWidget(QInstrumentInterface): def __init__(self, *args, interval=None, **kwargs): super().__init__(*args, uiFile='OpusWidget.ui', deviceClass=Opus, **kwargs) self.interval = interval or 200 self.timer = QTimer() self.connectSignals() self.startPolling() def connectSignals(self): self.timer.timeout.connect(self.poll) self.ui.PowerDial.valueChanged.connect(self.updatePower) self.ui.Power.editingFinished.connect(self.updatePowerDial) self.ui.PowerDial.valueChanged.connect(self.uncheck) self.ui.SendPower.clicked.connect(self.check) self.device.dataReady.connect(self.updateValues) self.ui.Disable.clicked.connect(self.disable) def startPolling(self): if self.isEnabled(): self.timer.start(self.interval) return self def stopPolling(self): self.timer.stop() @pyqtSlot() def poll(self): self.device.send('POWER?') self.device.send('CURRENT?') self.device.send('STATUS?') @pyqtSlot(int) def updatePower(self, value): self.ui.Power.setValue(value) @pyqtSlot(str) def updateValues(self, data): if 'mW' in data: numeric_filter = filter(str.isdigit, data) p = float((int("".join(numeric_filter))/10)) if p == 0.0: self.ui.EnableSwitch.setChecked(False) if p != 0.0: self.ui.EnableSwitch.setChecked(True) self.ui.ActualPower.setValue(p) if '%' in data: numeric_filter = filter(str.isdigit, data) p = float((int("".join(numeric_filter))/10)) self.ui.CurrentBox.setValue(p) @pyqtSlot() def check(self): self.ui.sentCheck.setChecked(True) a = self.ui.Power.value() self.device.set_power(a) @pyqtSlot() def uncheck(self): self.ui.sentCheck.setChecked(False) @pyqtSlot() def updatePowerDial(self): value = self.ui.Power.value() self.ui.PowerDial.setValue(int(value)) def disable(self): self.device.send('OFF') def main(): import sys from PyQt5.QtWidgets import QApplication app = QApplication(sys.argv) widget = QOpusWidget() widget.show() sys.exit(app.exec_()) if __name__ == '__main__': main()
true
true
f7012c1387aa97777a625149b41ef2c6e1c926f4
690
py
Python
DQM/EcalPreshowerMonitorModule/python/es_dqm_source_offline_cff.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
DQM/EcalPreshowerMonitorModule/python/es_dqm_source_offline_cff.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
DQM/EcalPreshowerMonitorModule/python/es_dqm_source_offline_cff.py
pasmuss/cmssw
566f40c323beef46134485a45ea53349f59ae534
[ "Apache-2.0" ]
null
null
null
import FWCore.ParameterSet.Config as cms from DQM.EcalPreshowerMonitorModule.ESRawDataTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESIntegrityTask_cfi import * ecalPreshowerIntegrityTask.DoLumiAnalysis = True from DQM.EcalPreshowerMonitorModule.ESFEDIntegrityTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESOccupancyTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESTrendTask_cfi import * dqmInfoES = cms.EDAnalyzer("DQMEventInfo", subSystemFolder = cms.untracked.string('EcalPreshower') ) es_dqm_source_offline = cms.Sequence(ecalPreshowerRawDataTask*ecalPreshowerFEDIntegrityTask*ecalPreshowerIntegrityTask*ecalPreshowerOccupancyTask*ecalPreshowerTrendTask)
46
169
0.876812
import FWCore.ParameterSet.Config as cms from DQM.EcalPreshowerMonitorModule.ESRawDataTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESIntegrityTask_cfi import * ecalPreshowerIntegrityTask.DoLumiAnalysis = True from DQM.EcalPreshowerMonitorModule.ESFEDIntegrityTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESOccupancyTask_cfi import * from DQM.EcalPreshowerMonitorModule.ESTrendTask_cfi import * dqmInfoES = cms.EDAnalyzer("DQMEventInfo", subSystemFolder = cms.untracked.string('EcalPreshower') ) es_dqm_source_offline = cms.Sequence(ecalPreshowerRawDataTask*ecalPreshowerFEDIntegrityTask*ecalPreshowerIntegrityTask*ecalPreshowerOccupancyTask*ecalPreshowerTrendTask)
true
true
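The fragment in the record above only defines a DQM sequence; it does work only when scheduled inside a CMSSW process. A hedged sketch of how such a _cff fragment is typically wired in — the process name and path label below are placeholders, and this only runs inside a CMSSW release area:

import FWCore.ParameterSet.Config as cms

process = cms.Process("ESDQM")  # placeholder process name
# Pull in the sequence defined in the fragment above; CMSSW maps the
# python/ directory of the package onto this module path.
process.load("DQM.EcalPreshowerMonitorModule.es_dqm_source_offline_cff")
# Schedule the sequence; 'esdqm_path' is an arbitrary label for this sketch.
process.esdqm_path = cms.Path(process.es_dqm_source_offline)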
f7012caea091390e3c93212966f0b19844281957
4,495
py
Python
fastai2/callback/progress.py
Tato14/fastai2
0ce2ffc95ac3a7b1dc5eaf90fc7c3997e7125651
[ "Apache-2.0" ]
1
2020-07-09T22:01:29.000Z
2020-07-09T22:01:29.000Z
fastai2/callback/progress.py
Tato14/fastai2
0ce2ffc95ac3a7b1dc5eaf90fc7c3997e7125651
[ "Apache-2.0" ]
1
2020-02-22T05:15:19.000Z
2020-02-22T05:15:19.000Z
fastai2/callback/progress.py
Tato14/fastai2
0ce2ffc95ac3a7b1dc5eaf90fc7c3997e7125651
[ "Apache-2.0" ]
null
null
null
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/16_callback.progress.ipynb (unless otherwise specified). __all__ = ['ProgressCallback', 'no_bar', 'ShowGraphCallback', 'CSVLogger'] # Cell from ..basics import * # Cell @docs class ProgressCallback(Callback): "A `Callback` to handle the display of progress bars" run_after=Recorder def begin_fit(self): assert hasattr(self.learn, 'recorder') if self.create_mbar: self.mbar = master_bar(list(range(self.n_epoch))) if self.learn.logger != noop: self.old_logger,self.learn.logger = self.logger,self._write_stats self._write_stats(self.recorder.metric_names) else: self.old_logger = noop def begin_epoch(self): if getattr(self, 'mbar', False): self.mbar.update(self.epoch) def begin_train(self): self._launch_pbar() def begin_validate(self): self._launch_pbar() def after_train(self): self.pbar.on_iter_end() def after_validate(self): self.pbar.on_iter_end() def after_batch(self): self.pbar.update(self.iter+1) if hasattr(self, 'smooth_loss'): self.pbar.comment = f'{self.smooth_loss:.4f}' def _launch_pbar(self): self.pbar = progress_bar(self.dl, parent=getattr(self, 'mbar', None), leave=False) self.pbar.update(0) def after_fit(self): if getattr(self, 'mbar', False): self.mbar.on_iter_end() delattr(self, 'mbar') self.learn.logger = self.old_logger def _write_stats(self, log): if getattr(self, 'mbar', False): self.mbar.write([f'{l:.6f}' if isinstance(l, float) else str(l) for l in log], table=True) _docs = dict(begin_fit="Setup the master bar over the epochs", begin_epoch="Update the master bar", begin_train="Launch a progress bar over the training dataloader", begin_validate="Launch a progress bar over the validation dataloader", after_train="Close the progress bar over the training dataloader", after_validate="Close the progress bar over the validation dataloader", after_batch="Update the current progress bar", after_fit="Close the master bar") defaults.callbacks = [TrainEvalCallback, Recorder, ProgressCallback] # Cell @patch @contextmanager def no_bar(self:Learner): "Context manager that deactivates the use of progress bars" has_progress = hasattr(self, 'progress') if has_progress: self.remove_cb(self.progress) yield self if has_progress: self.add_cb(ProgressCallback()) # Cell class ShowGraphCallback(Callback): "Update a graph of training and validation loss" run_after=ProgressCallback def begin_fit(self): self.run = not hasattr(self.learn, 'lr_finder') and not hasattr(self, "gather_preds") self.nb_batches = [] assert hasattr(self.learn, 'progress') def after_train(self): self.nb_batches.append(self.train_iter) def after_epoch(self): "Plot validation loss in the pbar graph" rec = self.learn.recorder iters = range_of(rec.losses) val_losses = [v[1] for v in rec.values] x_bounds = (0, (self.n_epoch - len(self.nb_batches)) * self.nb_batches[0] + len(rec.losses)) y_bounds = (0, max((max(Tensor(rec.losses)), max(Tensor(val_losses))))) self.progress.mbar.update_graph([(iters, rec.losses), (self.nb_batches, val_losses)], x_bounds, y_bounds) # Cell class CSVLogger(Callback): run_after=Recorder "Log the results displayed in `learn.path/fname`" def __init__(self, fname='history.csv', append=False): self.fname,self.append = Path(fname),append def read_log(self): "Convenience method to quickly access the log." return pd.read_csv(self.path/self.fname) def begin_fit(self): "Prepare file with metric names." 
self.path.parent.mkdir(parents=True, exist_ok=True) self.file = (self.path/self.fname).open('a' if self.append else 'w') self.file.write(','.join(self.recorder.metric_names) + '\n') self.old_logger,self.learn.logger = self.logger,self._write_line def _write_line(self, log): "Write a line with `log` and call the old logger." self.file.write(','.join([str(t) for t in log]) + '\n') self.old_logger(log) def after_fit(self): "Close the file and clean up." self.file.close() self.learn.logger = self.old_logger
39.429825
131
0.663404
__all__ = ['ProgressCallback', 'no_bar', 'ShowGraphCallback', 'CSVLogger'] from ..basics import * @docs class ProgressCallback(Callback): run_after=Recorder def begin_fit(self): assert hasattr(self.learn, 'recorder') if self.create_mbar: self.mbar = master_bar(list(range(self.n_epoch))) if self.learn.logger != noop: self.old_logger,self.learn.logger = self.logger,self._write_stats self._write_stats(self.recorder.metric_names) else: self.old_logger = noop def begin_epoch(self): if getattr(self, 'mbar', False): self.mbar.update(self.epoch) def begin_train(self): self._launch_pbar() def begin_validate(self): self._launch_pbar() def after_train(self): self.pbar.on_iter_end() def after_validate(self): self.pbar.on_iter_end() def after_batch(self): self.pbar.update(self.iter+1) if hasattr(self, 'smooth_loss'): self.pbar.comment = f'{self.smooth_loss:.4f}' def _launch_pbar(self): self.pbar = progress_bar(self.dl, parent=getattr(self, 'mbar', None), leave=False) self.pbar.update(0) def after_fit(self): if getattr(self, 'mbar', False): self.mbar.on_iter_end() delattr(self, 'mbar') self.learn.logger = self.old_logger def _write_stats(self, log): if getattr(self, 'mbar', False): self.mbar.write([f'{l:.6f}' if isinstance(l, float) else str(l) for l in log], table=True) _docs = dict(begin_fit="Setup the master bar over the epochs", begin_epoch="Update the master bar", begin_train="Launch a progress bar over the training dataloader", begin_validate="Launch a progress bar over the validation dataloader", after_train="Close the progress bar over the training dataloader", after_validate="Close the progress bar over the validation dataloader", after_batch="Update the current progress bar", after_fit="Close the master bar") defaults.callbacks = [TrainEvalCallback, Recorder, ProgressCallback] @patch @contextmanager def no_bar(self:Learner): has_progress = hasattr(self, 'progress') if has_progress: self.remove_cb(self.progress) yield self if has_progress: self.add_cb(ProgressCallback()) class ShowGraphCallback(Callback): run_after=ProgressCallback def begin_fit(self): self.run = not hasattr(self.learn, 'lr_finder') and not hasattr(self, "gather_preds") self.nb_batches = [] assert hasattr(self.learn, 'progress') def after_train(self): self.nb_batches.append(self.train_iter) def after_epoch(self): rec = self.learn.recorder iters = range_of(rec.losses) val_losses = [v[1] for v in rec.values] x_bounds = (0, (self.n_epoch - len(self.nb_batches)) * self.nb_batches[0] + len(rec.losses)) y_bounds = (0, max((max(Tensor(rec.losses)), max(Tensor(val_losses))))) self.progress.mbar.update_graph([(iters, rec.losses), (self.nb_batches, val_losses)], x_bounds, y_bounds) class CSVLogger(Callback): run_after=Recorder def __init__(self, fname='history.csv', append=False): self.fname,self.append = Path(fname),append def read_log(self): return pd.read_csv(self.path/self.fname) def begin_fit(self): self.path.parent.mkdir(parents=True, exist_ok=True) self.file = (self.path/self.fname).open('a' if self.append else 'w') self.file.write(','.join(self.recorder.metric_names) + '\n') self.old_logger,self.learn.logger = self.logger,self._write_line def _write_line(self, log): self.file.write(','.join([str(t) for t in log]) + '\n') self.old_logger(log) def after_fit(self): self.file.close() self.learn.logger = self.old_logger
true
true
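The CSVLogger callback in the fastai2 record above writes one header row plus one row per epoch to learn.path/fname. A hedged usage sketch, assuming an already-built fastai2 Learner called learn (its data and model are not part of the record):

from fastai2.callback.progress import CSVLogger

# `learn` is assumed to be a pre-existing fastai2 Learner.
logger = CSVLogger(fname='history.csv', append=False)
# begin_fit writes the metric names; _write_line appends one row per epoch.
learn.fit(3, cbs=logger)
# read_log() is the convenience accessor defined in the record.
print(logger.read_log().tail())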
f7012cda035aa7f66c1ab45c72651ee5cbf96751
460
py
Python
bin/rehex.py
genterium-project/sentinel
05eef4d70ab66d269407e06175e24e5e01014c21
[ "MIT" ]
1
2019-09-04T03:01:45.000Z
2019-09-04T03:01:45.000Z
bin/rehex.py
genterium-project/sentinel
05eef4d70ab66d269407e06175e24e5e01014c21
[ "MIT" ]
null
null
null
bin/rehex.py
genterium-project/sentinel
05eef4d70ab66d269407e06175e24e5e01014c21
[ "MIT" ]
null
null
null
import simplejson import binascii import sys import pdb from pprint import pprint import sys import os sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib'))) import gentariumlib # ============================================================================ usage = "%s <hex>" % sys.argv[0] obj = None if len(sys.argv) < 2: print(usage) sys.exit(1) else: obj = gentariumlib.deserialise(sys.argv[1]) pdb.set_trace() 1
20.909091
84
0.591304
import simplejson import binascii import sys import pdb from pprint import pprint import sys import os sys.path.append(os.path.normpath(os.path.join(os.path.dirname(__file__), '../lib'))) import gentariumlib usage = "%s <hex>" % sys.argv[0] obj = None if len(sys.argv) < 2: print(usage) sys.exit(1) else: obj = gentariumlib.deserialise(sys.argv[1]) pdb.set_trace() 1
true
true
f7012d2f2373077fcd2146095ef27c15ebf9f16e
421
pyde
Python
sketches/Bouncing_Ball_Simulator_Stage_5/Bouncing_Ball_Simulator_Stage_5.pyde
kantel/processingpy
74aae222e46f68d1c8f06307aaede3cdae65c8ec
[ "MIT" ]
4
2018-06-03T02:11:46.000Z
2021-08-18T19:55:15.000Z
sketches/Bouncing_Ball_Simulator_Stage_5/Bouncing_Ball_Simulator_Stage_5.pyde
kantel/processingpy
74aae222e46f68d1c8f06307aaede3cdae65c8ec
[ "MIT" ]
null
null
null
sketches/Bouncing_Ball_Simulator_Stage_5/Bouncing_Ball_Simulator_Stage_5.pyde
kantel/processingpy
74aae222e46f68d1c8f06307aaede3cdae65c8ec
[ "MIT" ]
3
2019-12-23T19:12:51.000Z
2021-04-30T14:00:31.000Z
from bouncingball import BouncingBall, BouncingBox balls = [] boxes = [] def setup(): size(600, 600) for _ in range(60): if random(10) < 5: balls.append(BouncingBall()) else: boxes.append(BouncingBox()) def draw(): background("#2b3e50") for ball in balls: ball.move() ball.display() for box in boxes: box.move() box.display()
19.136364
50
0.551069
from bouncingball import BouncingBall, BouncingBox balls = [] boxes = [] def setup(): size(600, 600) for _ in range(60): if random(10) < 5: balls.append(BouncingBall()) else: boxes.append(BouncingBox()) def draw(): background("#2b3e50") for ball in balls: ball.move() ball.display() for box in boxes: box.move() box.display()
true
true
f7012f4b52647b1313d4cde5987a00ed3870b14b
2,912
py
Python
Lib/site-packages/pyglet/debug.py
caiyongji/tf2.3.1-py3.7.9-full-built
ace4efcbf05b2b494388739718a18c13eab83c71
[ "CNRI-Python-GPL-Compatible" ]
1
2021-05-24T10:08:51.000Z
2021-05-24T10:08:51.000Z
Lib/site-packages/pyglet/debug.py
caiyongji/tf2.3.1-py3.7.9-full-built
ace4efcbf05b2b494388739718a18c13eab83c71
[ "CNRI-Python-GPL-Compatible" ]
null
null
null
Lib/site-packages/pyglet/debug.py
caiyongji/tf2.3.1-py3.7.9-full-built
ace4efcbf05b2b494388739718a18c13eab83c71
[ "CNRI-Python-GPL-Compatible" ]
null
null
null
# ---------------------------------------------------------------------------- # pyglet # Copyright (c) 2006-2008 Alex Holkner # Copyright (c) 2008-2020 pyglet contributors # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of pyglet nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ---------------------------------------------------------------------------- import pyglet def debug_print(enabled_or_option='debug'): """Get a debug printer that is enabled based on a boolean input or a pyglet option. The debug print function returned should be used in an assert. This way it can be optimized out when running python with the -O flag. Usage example:: from pyglet.debug import debug_print _debug_media = debug_print('debug_media') def some_func(): assert _debug_media('My debug statement') :parameters: `enabled_or_options` : bool or str If a bool is passed, debug printing is enabled if it is True. If str is passed debug printing is enabled if the pyglet option with that name is True. :returns: Function for debug printing. """ if isinstance(enabled_or_option, bool): enabled = enabled_or_option else: enabled = pyglet.options.get(enabled_or_option, False) if enabled: def _debug_print(*args, **kwargs): print(*args, **kwargs) return True else: def _debug_print(*args, **kwargs): return True return _debug_print
38.826667
90
0.683723
import pyglet def debug_print(enabled_or_option='debug'): if isinstance(enabled_or_option, bool): enabled = enabled_or_option else: enabled = pyglet.options.get(enabled_or_option, False) if enabled: def _debug_print(*args, **kwargs): print(*args, **kwargs) return True else: def _debug_print(*args, **kwargs): return True return _debug_print
true
true
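The debug_print factory in the pyglet record above is meant to be called inside an assert, so the logging call is stripped when Python runs with -O. A minimal sketch of that pattern, using only what the record's own docstring shows; the frame-loading function is a made-up example:

from pyglet.debug import debug_print

# The boolean form enables printing unconditionally; the string form
# ('debug_media', etc.) would key off a pyglet option instead.
_debug = debug_print(True)

def load_frame(index):
    # Optimized away under `python -O`, because _debug always returns True.
    assert _debug('loading frame', index)
    return index

load_frame(3)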
f7012faf22e24f1a9e4fdccb8629b5da8c37dd02
31,523
py
Python
python/ccxt/cobinhood.py
victor95pc/ccxt
5c3e606296a1b15852a35f1330b645f451fa08d6
[ "MIT" ]
1
2019-03-17T22:44:30.000Z
2019-03-17T22:44:30.000Z
python/ccxt/cobinhood.py
Lara-Bell/ccxt
e09230b4b60d5c33e3f6ebc044002bab6f733553
[ "MIT" ]
null
null
null
python/ccxt/cobinhood.py
Lara-Bell/ccxt
e09230b4b60d5c33e3f6ebc044002bab6f733553
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- # PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN: # https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code from ccxt.base.exchange import Exchange import json from ccxt.base.errors import ExchangeError from ccxt.base.errors import PermissionDenied from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import InvalidAddress from ccxt.base.errors import InvalidOrder from ccxt.base.errors import InvalidNonce class cobinhood (Exchange): def describe(self): return self.deep_extend(super(cobinhood, self).describe(), { 'id': 'cobinhood', 'name': 'COBINHOOD', 'countries': ['TW'], 'rateLimit': 1000 / 10, 'version': 'v1', 'has': { 'fetchCurrencies': True, 'fetchTickers': True, 'fetchOHLCV': True, 'fetchOpenOrders': True, 'fetchClosedOrders': True, 'fetchOrderTrades': True, 'fetchOrder': True, 'fetchDepositAddress': True, 'createDepositAddress': True, 'fetchDeposits': True, 'fetchWithdrawals': True, 'withdraw': True, 'fetchMyTrades': True, 'editOrder': True, }, 'requiredCredentials': { 'apiKey': True, 'secret': False, }, 'timeframes': { # the first two don't seem to work at all '1m': '1m', '5m': '5m', '15m': '15m', '30m': '30m', '1h': '1h', '3h': '3h', '6h': '6h', '12h': '12h', '1d': '1D', '1w': '7D', '2w': '14D', '1M': '1M', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/35755576-dee02e5c-0878-11e8-989f-1595d80ba47f.jpg', 'api': 'https://api.cobinhood.com', 'www': 'https://cobinhood.com', 'doc': 'https://cobinhood.github.io/api-public', }, 'api': { 'system': { 'get': [ 'info', 'time', 'messages', 'messages/{message_id}', ], }, 'admin': { 'get': [ 'system/messages', 'system/messages/{message_id}', ], 'post': [ 'system/messages', ], 'patch': [ 'system/messages/{message_id}', ], 'delete': [ 'system/messages/{message_id}', ], }, 'public': { 'get': [ 'market/fundingbook/precisions/{currency_id}', 'market/fundingbooks/{currency_id}', 'market/tickers', 'market/currencies', 'market/quote_currencies', 'market/trading_pairs', 'market/orderbook/precisions/{trading_pair_id}', 'market/orderbooks/{trading_pair_id}', 'market/stats', 'market/tickers', # fetchTickers 'market/tickers/{trading_pair_id}', 'market/trades/{trading_pair_id}', 'market/trades_history/{trading_pair_id}', 'market/trading_pairs', 'chart/candles/{trading_pair_id}', 'system/time', ], }, 'private': { 'get': [ 'funding/auto_offerings', 'funding/auto_offerings/{currency_id}', 'funding/funding_history', 'funding/fundings', 'funding/loans', 'funding/loans/{loan_id}', 'trading/orders/{order_id}', 'trading/orders/{order_id}/trades', 'trading/orders', 'trading/order_history', 'trading/positions', 'trading/positions/{trading_pair_id}', 'trading/positions/{trading_pair_id}/claimable_size', 'trading/trades', 'trading/trades/{trade_id}', 'trading/volume', 'wallet/balances', 'wallet/ledger', 'wallet/limits/withdrawal', 'wallet/generic_deposits', 'wallet/generic_deposits/{generic_deposit_id}', 'wallet/generic_withdrawals', 'wallet/generic_withdrawals/{generic_withdrawal_id}', # older endpoints 'wallet/deposit_addresses', 'wallet/deposit_addresses/iota', 'wallet/withdrawal_addresses', 'wallet/withdrawal_frozen', 'wallet/withdrawals/{withdrawal_id}', 'wallet/withdrawals', 'wallet/deposits/{deposit_id}', 'wallet/deposits', ], 'patch': [ 'trading/positions/{trading_pair_id}', ], 'post': [ 'funding/auto_offerings', 'funding/fundings', 'trading/check_order', 'trading/orders', # older endpoints 'wallet/deposit_addresses', 'wallet/transfer', 
'wallet/withdrawal_addresses', 'wallet/withdrawals', 'wallet/withdrawals/fee', ], 'put': [ 'funding/fundings/{funding_id}', 'trading/orders/{order_id}', ], 'delete': [ 'funding/auto_offerings/{currency_id}', 'funding/fundings/{funding_id}', 'funding/loans/{loan_id}', 'trading/orders/{order_id}', 'trading/positions/{trading_pair_id}', 'wallet/generic_withdrawals/{generic_withdrawal_id}', 'wallet/withdrawal_addresses/{wallet_id}', ], }, }, 'fees': { 'trading': { 'maker': 0.0, 'taker': 0.0, }, }, 'precision': { 'amount': 8, 'price': 8, }, 'exceptions': { 'insufficient_balance': InsufficientFunds, 'invalid_order_size': InvalidOrder, 'invalid_nonce': InvalidNonce, 'unauthorized_scope': PermissionDenied, 'invalid_address': InvalidAddress, }, 'commonCurrencies': { 'SMT': 'SocialMedia.Market', 'MTN': 'Motion Token', }, }) def fetch_currencies(self, params={}): response = self.publicGetMarketCurrencies(params) currencies = response['result']['currencies'] result = {} for i in range(0, len(currencies)): currency = currencies[i] id = currency['currency'] code = self.common_currency_code(id) minUnit = self.safe_float(currency, 'min_unit') result[code] = { 'id': id, 'code': code, 'name': currency['name'], 'active': True, 'fiat': False, 'precision': self.precision_from_string(currency['min_unit']), 'limits': { 'amount': { 'min': minUnit, 'max': None, }, 'price': { 'min': minUnit, 'max': None, }, 'deposit': { 'min': minUnit, 'max': None, }, 'withdraw': { 'min': minUnit, 'max': None, }, }, 'funding': { 'withdraw': { 'fee': self.safe_float(currency, 'withdrawal_fee'), }, 'deposit': { 'fee': self.safe_float(currency, 'deposit_fee'), }, }, 'info': currency, } return result def fetch_markets(self, params={}): response = self.publicGetMarketTradingPairs() markets = response['result']['trading_pairs'] result = [] for i in range(0, len(markets)): market = markets[i] id = market['id'] baseId, quoteId = id.split('-') base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote precision = { 'amount': 8, 'price': self.precision_from_string(market['quote_increment']), } active = self.safe_value(market, 'is_active', True) result.append({ 'id': id, 'symbol': symbol, 'base': base, 'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'active': active, 'precision': precision, 'limits': { 'amount': { 'min': self.safe_float(market, 'base_min_size'), 'max': self.safe_float(market, 'base_max_size'), }, 'price': { 'min': None, 'max': None, }, 'cost': { 'min': None, 'max': None, }, }, 'info': market, }) return result def parse_ticker(self, ticker, market=None): symbol = None if market is None: marketId = self.safe_string(ticker, 'trading_pair_id') if marketId in self.markets_by_id: market = self.markets_by_id[marketId] else: baseId, quoteId = marketId.split('-') base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote if market is not None: symbol = market['symbol'] timestamp = self.safe_integer(ticker, 'timestamp') last = self.safe_float(ticker, 'last_trade_price') return { 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'high': self.safe_float(ticker, '24h_high'), 'low': self.safe_float(ticker, '24h_low'), 'bid': self.safe_float(ticker, 'highest_bid'), 'bidVolume': None, 'ask': self.safe_float(ticker, 'lowest_ask'), 'askVolume': None, 'vwap': None, 'open': None, 'close': last, 'last': last, 'previousClose': None, 'change': self.safe_float(ticker, 'percentChanged24hr'), 'percentage': None, 
'average': None, 'baseVolume': self.safe_float(ticker, '24h_volume'), 'quoteVolume': self.safe_float(ticker, 'quote_volume'), 'info': ticker, } def fetch_ticker(self, symbol, params={}): self.load_markets() market = self.market(symbol) response = self.publicGetMarketTickersTradingPairId(self.extend({ 'trading_pair_id': market['id'], }, params)) ticker = response['result']['ticker'] return self.parse_ticker(ticker, market) def fetch_tickers(self, symbols=None, params={}): self.load_markets() response = self.publicGetMarketTickers(params) tickers = response['result']['tickers'] result = [] for i in range(0, len(tickers)): result.append(self.parse_ticker(tickers[i])) return self.index_by(result, 'symbol') def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() request = { 'trading_pair_id': self.market_id(symbol), } if limit is not None: request['limit'] = limit # 100 response = self.publicGetMarketOrderbooksTradingPairId(self.extend(request, params)) return self.parse_order_book(response['result']['orderbook'], None, 'bids', 'asks', 0, 2) def parse_trade(self, trade, market=None): symbol = None if market: symbol = market['symbol'] timestamp = trade['timestamp'] price = self.safe_float(trade, 'price') amount = self.safe_float(trade, 'size') cost = price * amount # you can't determine your side from maker/taker side and vice versa # you can't determine if your order/trade was a maker or a taker based # on just the side of your order/trade # https://github.com/ccxt/ccxt/issues/4300 # side = 'sell' if (trade['maker_side'] == 'bid') else 'buy' side = None return { 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': trade['id'], 'order': None, 'type': None, 'side': side, 'price': price, 'amount': amount, 'cost': cost, 'fee': None, } def fetch_trades(self, symbol, since=None, limit=50, params={}): self.load_markets() market = self.market(symbol) response = self.publicGetMarketTradesTradingPairId(self.extend({ 'trading_pair_id': market['id'], 'limit': limit, # default 20, but that seems too little }, params)) trades = response['result']['trades'] return self.parse_trades(trades, market, since, limit) def parse_ohlcv(self, ohlcv, market=None, timeframe='5m', since=None, limit=None): return [ # they say that timestamps are Unix Timestamps in seconds, but in fact those are milliseconds ohlcv['timestamp'], float(ohlcv['open']), float(ohlcv['high']), float(ohlcv['low']), float(ohlcv['close']), float(ohlcv['volume']), ] def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) # # they say in their docs that end_time defaults to current server time # but if you don't specify it, their range limits does not allow you to query anything # # they also say that start_time defaults to 0, # but most calls fail if you do not specify any of end_time # # to make things worse, their docs say it should be a Unix Timestamp # but with seconds it fails, so we set milliseconds(somehow it works that way) # endTime = self.milliseconds() request = { 'trading_pair_id': market['id'], 'timeframe': self.timeframes[timeframe], 'end_time': endTime, } if since is not None: request['start_time'] = since response = self.publicGetChartCandlesTradingPairId(self.extend(request, params)) ohlcv = response['result']['candles'] return self.parse_ohlcvs(ohlcv, market, timeframe, since, limit) def fetch_balance(self, params={}): self.load_markets() response = self.privateGetWalletBalances(params) 
result = {'info': response} balances = response['result']['balances'] for i in range(0, len(balances)): balance = balances[i] currency = balance['currency'] if currency in self.currencies_by_id: currency = self.currencies_by_id[currency]['code'] account = { 'used': float(balance['on_order']), 'total': float(balance['total']), } account['free'] = float(account['total'] - account['used']) result[currency] = account return self.parse_balance(result) def parse_order_status(self, status): statuses = { 'filled': 'closed', 'rejected': 'closed', 'partially_filled': 'open', 'pending_cancellation': 'open', 'pending_modification': 'open', 'open': 'open', 'new': 'open', 'queued': 'open', 'cancelled': 'canceled', 'triggered': 'triggered', } if status in statuses: return statuses[status] return status def parse_order(self, order, market=None): # # { # 'completed_at': None, # 'eq_price': '0', # 'filled': '0', # 'id': '88426800-beae-4407-b4a1-f65cef693542', # 'price': '0.00000507', # 'side': 'bid', # 'size': '3503.6489', # 'source': 'exchange', # 'state': 'open', # 'timestamp': 1535258403597, # 'trading_pair_id': 'ACT-BTC', # 'type': 'limit', # } # symbol = None if market is None: marketId = self.safe_string_2(order, 'trading_pair', 'trading_pair_id') market = self.safe_value(self.markets_by_id, marketId) if market is not None: symbol = market['symbol'] timestamp = self.safe_integer(order, 'timestamp') price = self.safe_float(order, 'price') average = self.safe_float(order, 'eq_price') amount = self.safe_float(order, 'size') filled = self.safe_float(order, 'filled') remaining = None cost = None if filled is not None and average is not None: cost = average * filled elif average is not None: cost = average * amount if amount is not None: if filled is not None: remaining = amount - filled status = self.parse_order_status(self.safe_string(order, 'state')) side = self.safe_string(order, 'side') if side == 'bid': side = 'buy' elif side == 'ask': side = 'sell' return { 'id': self.safe_string(order, 'id'), 'datetime': self.iso8601(timestamp), 'timestamp': timestamp, 'lastTradeTimestamp': None, 'status': status, 'symbol': symbol, 'type': self.safe_string(order, 'type'), # market, limit, stop, stop_limit, trailing_stop, fill_or_kill 'side': side, 'price': price, 'cost': cost, 'average': average, 'amount': amount, 'filled': filled, 'remaining': remaining, 'trades': None, 'fee': None, 'info': order, } def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) side = 'ask' if (side == 'sell') else 'bid' request = { 'trading_pair_id': market['id'], 'type': type, # market, limit, stop, stop_limit 'side': side, 'size': self.amount_to_precision(symbol, amount), } if type != 'market': request['price'] = self.price_to_precision(symbol, price) response = self.privatePostTradingOrders(self.extend(request, params)) order = self.parse_order(response['result']['order'], market) id = order['id'] self.orders[id] = order return order def edit_order(self, id, symbol, type, side, amount, price, params={}): self.load_markets() response = self.privatePutTradingOrdersOrderId(self.extend({ 'order_id': id, 'price': self.price_to_precision(symbol, price), 'size': self.amount_to_precision(symbol, amount), }, params)) return self.parse_order(self.extend(response, { 'id': id, })) def cancel_order(self, id, symbol=None, params={}): self.load_markets() response = self.privateDeleteTradingOrdersOrderId(self.extend({ 'order_id': id, }, params)) return 
self.parse_order(self.extend(response, { 'id': id, })) def fetch_order(self, id, symbol=None, params={}): self.load_markets() response = self.privateGetTradingOrdersOrderId(self.extend({ 'order_id': str(id), }, params)) return self.parse_order(response['result']['order']) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() result = self.privateGetTradingOrders(params) orders = self.parse_orders(result['result']['orders'], None, since, limit) if symbol is not None: return self.filter_by_symbol_since_limit(orders, symbol, since, limit) return self.filter_by_since_limit(orders, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() result = self.privateGetTradingOrderHistory(params) orders = self.parse_orders(result['result']['orders'], None, since, limit) if symbol is not None: return self.filter_by_symbol_since_limit(orders, symbol, since, limit) return self.filter_by_since_limit(orders, since, limit) def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}): self.load_markets() response = self.privateGetTradingOrdersOrderIdTrades(self.extend({ 'order_id': id, }, params)) market = None if (symbol is None) else self.market(symbol) return self.parse_trades(response['result']['trades'], market) def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = {} if symbol is not None: request['trading_pair_id'] = market['id'] response = self.privateGetTradingTrades(self.extend(request, params)) return self.parse_trades(response['result']['trades'], market, since, limit) def create_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) # 'ledger_type' is required, see: https://cobinhood.github.io/api-public/#create-new-deposit-address ledgerType = self.safe_string(params, 'ledger_type', 'exchange') request = { 'currency': currency['id'], 'ledger_type': ledgerType, } response = self.privatePostWalletDepositAddresses(self.extend(request, params)) address = self.safe_string(response['result']['deposit_address'], 'address') tag = self.safe_string(response['result']['deposit_address'], 'memo') self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'info': response, } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) response = self.privateGetWalletDepositAddresses(self.extend({ 'currency': currency['id'], }, params)) # # {success: True, # result: {deposit_addresses: [{ address: "abcdefg", # blockchain_id: "eosio", # created_at: 1536768050235, # currency: "EOS", # memo: "12345678", # type: "exchange" }]} } # addresses = self.safe_value(response['result'], 'deposit_addresses', []) address = None tag = None if len(addresses) > 0: address = self.safe_string(addresses[0], 'address') tag = self.safe_string_2(addresses[0], 'memo', 'tag') self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): self.load_markets() currency = self.currency(code) request = { 'currency': currency['id'], 'amount': amount, 'address': address, } if tag is not None: request['memo'] = tag response = self.privatePostWalletWithdrawals(self.extend(request, params)) return { 'id': None, 'info': response, } def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() if code is None: 
raise ExchangeError(self.id + ' fetchDeposits() requires a currency code arguemnt') currency = self.currency(code) request = { 'currency': currency['id'], } response = self.privateGetWalletDeposits(self.extend(request, params)) return self.parseTransactions(response['result']['deposits'], currency) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): self.load_markets() if code is None: raise ExchangeError(self.id + ' fetchWithdrawals() requires a currency code arguemnt') currency = self.currency(code) request = { 'currency': currency['id'], } response = self.privateGetWalletWithdrawals(self.extend(request, params)) return self.parseTransactions(response['result']['withdrawals'], currency) def parse_transaction_status(self, status): statuses = { 'tx_pending_two_factor_auth': 'pending', 'tx_pending_email_auth': 'pending', 'tx_pending_approval': 'pending', 'tx_approved': 'pending', 'tx_processing': 'pending', 'tx_pending': 'pending', 'tx_sent': 'pending', 'tx_cancelled': 'canceled', 'tx_timeout': 'failed', 'tx_invalid': 'failed', 'tx_rejected': 'failed', 'tx_confirmed': 'ok', } return statuses[status] if (status in list(statuses.keys())) else status def parse_transaction(self, transaction, currency=None): timestamp = self.safe_integer(transaction, 'created_at') code = None if currency is None: currencyId = self.safe_string(transaction, 'currency') if currencyId in self.currencies_by_id: currency = self.currencies_by_id[currencyId] else: code = self.common_currency_code(currencyId) if currency is not None: code = currency['code'] id = None withdrawalId = self.safe_string(transaction, 'withdrawal_id') depositId = self.safe_string(transaction, 'deposit_id') type = None address = None if withdrawalId is not None: type = 'withdrawal' id = withdrawalId address = self.safe_string(transaction, 'to_address') elif depositId is not None: type = 'deposit' id = depositId address = self.safe_string(transaction, 'from_address') additionalInfo = self.safe_value(transaction, 'additional_info', {}) tag = self.safe_string(additionalInfo, 'memo') return { 'info': transaction, 'id': id, 'txid': self.safe_string(transaction, 'txhash'), 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'tag': tag, # refix it properly 'type': type, 'amount': self.safe_float(transaction, 'amount'), 'currency': code, 'status': self.parse_transaction_status(transaction['status']), 'updated': None, 'fee': { 'cost': self.safe_float(transaction, 'fee'), 'rate': None, }, } def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): url = self.urls['api'] + '/' + self.version + '/' + self.implode_params(path, params) query = self.omit(params, self.extract_params(path)) headers = {} if api == 'private': self.check_required_credentials() # headers['device_id'] = self.apiKey headers['nonce'] = str(self.nonce()) headers['Authorization'] = self.apiKey if method == 'GET': query = self.urlencode(query) if len(query): url += '?' 
+ query else: headers['Content-type'] = 'application/json charset=UTF-8' body = self.json(query) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body, response): if code < 400 or code >= 600: return if body[0] != '{': raise ExchangeError(self.id + ' ' + body) response = json.loads(body) feedback = self.id + ' ' + self.json(response) errorCode = self.safe_value(response['error'], 'error_code') if method == 'DELETE' or method == 'GET': if errorCode == 'parameter_error': if url.find('trading/orders/') >= 0: # Cobinhood returns vague "parameter_error" on fetchOrder() and cancelOrder() calls # for invalid order IDs as well as orders that are not "open" raise InvalidOrder(feedback) exceptions = self.exceptions if errorCode in exceptions: raise exceptions[errorCode](feedback) raise ExchangeError(feedback) def nonce(self): return self.milliseconds()
39.801768
126
0.498842
from ccxt.base.exchange import Exchange import json from ccxt.base.errors import ExchangeError from ccxt.base.errors import PermissionDenied from ccxt.base.errors import InsufficientFunds from ccxt.base.errors import InvalidAddress from ccxt.base.errors import InvalidOrder from ccxt.base.errors import InvalidNonce class cobinhood (Exchange): def describe(self): return self.deep_extend(super(cobinhood, self).describe(), { 'id': 'cobinhood', 'name': 'COBINHOOD', 'countries': ['TW'], 'rateLimit': 1000 / 10, 'version': 'v1', 'has': { 'fetchCurrencies': True, 'fetchTickers': True, 'fetchOHLCV': True, 'fetchOpenOrders': True, 'fetchClosedOrders': True, 'fetchOrderTrades': True, 'fetchOrder': True, 'fetchDepositAddress': True, 'createDepositAddress': True, 'fetchDeposits': True, 'fetchWithdrawals': True, 'withdraw': True, 'fetchMyTrades': True, 'editOrder': True, }, 'requiredCredentials': { 'apiKey': True, 'secret': False, }, 'timeframes': { '1m': '1m', '5m': '5m', '15m': '15m', '30m': '30m', '1h': '1h', '3h': '3h', '6h': '6h', '12h': '12h', '1d': '1D', '1w': '7D', '2w': '14D', '1M': '1M', }, 'urls': { 'logo': 'https://user-images.githubusercontent.com/1294454/35755576-dee02e5c-0878-11e8-989f-1595d80ba47f.jpg', 'api': 'https://api.cobinhood.com', 'www': 'https://cobinhood.com', 'doc': 'https://cobinhood.github.io/api-public', }, 'api': { 'system': { 'get': [ 'info', 'time', 'messages', 'messages/{message_id}', ], }, 'admin': { 'get': [ 'system/messages', 'system/messages/{message_id}', ], 'post': [ 'system/messages', ], 'patch': [ 'system/messages/{message_id}', ], 'delete': [ 'system/messages/{message_id}', ], }, 'public': { 'get': [ 'market/fundingbook/precisions/{currency_id}', 'market/fundingbooks/{currency_id}', 'market/tickers', 'market/currencies', 'market/quote_currencies', 'market/trading_pairs', 'market/orderbook/precisions/{trading_pair_id}', 'market/orderbooks/{trading_pair_id}', 'market/stats', 'market/tickers', # fetchTickers 'market/tickers/{trading_pair_id}', 'market/trades/{trading_pair_id}', 'market/trades_history/{trading_pair_id}', 'market/trading_pairs', 'chart/candles/{trading_pair_id}', 'system/time', ], }, 'private': { 'get': [ 'funding/auto_offerings', 'funding/auto_offerings/{currency_id}', 'funding/funding_history', 'funding/fundings', 'funding/loans', 'funding/loans/{loan_id}', 'trading/orders/{order_id}', 'trading/orders/{order_id}/trades', 'trading/orders', 'trading/order_history', 'trading/positions', 'trading/positions/{trading_pair_id}', 'trading/positions/{trading_pair_id}/claimable_size', 'trading/trades', 'trading/trades/{trade_id}', 'trading/volume', 'wallet/balances', 'wallet/ledger', 'wallet/limits/withdrawal', 'wallet/generic_deposits', 'wallet/generic_deposits/{generic_deposit_id}', 'wallet/generic_withdrawals', 'wallet/generic_withdrawals/{generic_withdrawal_id}', # older endpoints 'wallet/deposit_addresses', 'wallet/deposit_addresses/iota', 'wallet/withdrawal_addresses', 'wallet/withdrawal_frozen', 'wallet/withdrawals/{withdrawal_id}', 'wallet/withdrawals', 'wallet/deposits/{deposit_id}', 'wallet/deposits', ], 'patch': [ 'trading/positions/{trading_pair_id}', ], 'post': [ 'funding/auto_offerings', 'funding/fundings', 'trading/check_order', 'trading/orders', # older endpoints 'wallet/deposit_addresses', 'wallet/transfer', 'wallet/withdrawal_addresses', 'wallet/withdrawals', 'wallet/withdrawals/fee', ], 'put': [ 'funding/fundings/{funding_id}', 'trading/orders/{order_id}', ], 'delete': [ 'funding/auto_offerings/{currency_id}', 
'funding/fundings/{funding_id}', 'funding/loans/{loan_id}', 'trading/orders/{order_id}', 'trading/positions/{trading_pair_id}', 'wallet/generic_withdrawals/{generic_withdrawal_id}', 'wallet/withdrawal_addresses/{wallet_id}', ], }, }, 'fees': { 'trading': { 'maker': 0.0, 'taker': 0.0, }, }, 'precision': { 'amount': 8, 'price': 8, }, 'exceptions': { 'insufficient_balance': InsufficientFunds, 'invalid_order_size': InvalidOrder, 'invalid_nonce': InvalidNonce, 'unauthorized_scope': PermissionDenied, 'invalid_address': InvalidAddress, }, 'commonCurrencies': { 'SMT': 'SocialMedia.Market', 'MTN': 'Motion Token', }, }) def fetch_currencies(self, params={}): response = self.publicGetMarketCurrencies(params) currencies = response['result']['currencies'] result = {} for i in range(0, len(currencies)): currency = currencies[i] id = currency['currency'] code = self.common_currency_code(id) minUnit = self.safe_float(currency, 'min_unit') result[code] = { 'id': id, 'code': code, 'name': currency['name'], 'active': True, 'fiat': False, 'precision': self.precision_from_string(currency['min_unit']), 'limits': { 'amount': { 'min': minUnit, 'max': None, }, 'price': { 'min': minUnit, 'max': None, }, 'deposit': { 'min': minUnit, 'max': None, }, 'withdraw': { 'min': minUnit, 'max': None, }, }, 'funding': { 'withdraw': { 'fee': self.safe_float(currency, 'withdrawal_fee'), }, 'deposit': { 'fee': self.safe_float(currency, 'deposit_fee'), }, }, 'info': currency, } return result def fetch_markets(self, params={}): response = self.publicGetMarketTradingPairs() markets = response['result']['trading_pairs'] result = [] for i in range(0, len(markets)): market = markets[i] id = market['id'] baseId, quoteId = id.split('-') base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote precision = { 'amount': 8, 'price': self.precision_from_string(market['quote_increment']), } active = self.safe_value(market, 'is_active', True) result.append({ 'id': id, 'symbol': symbol, 'base': base, 'quote': quote, 'baseId': baseId, 'quoteId': quoteId, 'active': active, 'precision': precision, 'limits': { 'amount': { 'min': self.safe_float(market, 'base_min_size'), 'max': self.safe_float(market, 'base_max_size'), }, 'price': { 'min': None, 'max': None, }, 'cost': { 'min': None, 'max': None, }, }, 'info': market, }) return result def parse_ticker(self, ticker, market=None): symbol = None if market is None: marketId = self.safe_string(ticker, 'trading_pair_id') if marketId in self.markets_by_id: market = self.markets_by_id[marketId] else: baseId, quoteId = marketId.split('-') base = self.common_currency_code(baseId) quote = self.common_currency_code(quoteId) symbol = base + '/' + quote if market is not None: symbol = market['symbol'] timestamp = self.safe_integer(ticker, 'timestamp') last = self.safe_float(ticker, 'last_trade_price') return { 'symbol': symbol, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'high': self.safe_float(ticker, '24h_high'), 'low': self.safe_float(ticker, '24h_low'), 'bid': self.safe_float(ticker, 'highest_bid'), 'bidVolume': None, 'ask': self.safe_float(ticker, 'lowest_ask'), 'askVolume': None, 'vwap': None, 'open': None, 'close': last, 'last': last, 'previousClose': None, 'change': self.safe_float(ticker, 'percentChanged24hr'), 'percentage': None, 'average': None, 'baseVolume': self.safe_float(ticker, '24h_volume'), 'quoteVolume': self.safe_float(ticker, 'quote_volume'), 'info': ticker, } def fetch_ticker(self, symbol, params={}): self.load_markets() 
market = self.market(symbol) response = self.publicGetMarketTickersTradingPairId(self.extend({ 'trading_pair_id': market['id'], }, params)) ticker = response['result']['ticker'] return self.parse_ticker(ticker, market) def fetch_tickers(self, symbols=None, params={}): self.load_markets() response = self.publicGetMarketTickers(params) tickers = response['result']['tickers'] result = [] for i in range(0, len(tickers)): result.append(self.parse_ticker(tickers[i])) return self.index_by(result, 'symbol') def fetch_order_book(self, symbol, limit=None, params={}): self.load_markets() request = { 'trading_pair_id': self.market_id(symbol), } if limit is not None: request['limit'] = limit # 100 response = self.publicGetMarketOrderbooksTradingPairId(self.extend(request, params)) return self.parse_order_book(response['result']['orderbook'], None, 'bids', 'asks', 0, 2) def parse_trade(self, trade, market=None): symbol = None if market: symbol = market['symbol'] timestamp = trade['timestamp'] price = self.safe_float(trade, 'price') amount = self.safe_float(trade, 'size') cost = price * amount # you can't determine your side from maker/taker side and vice versa # on just the side of your order/trade # https://github.com/ccxt/ccxt/issues/4300 # side = 'sell' if (trade['maker_side'] == 'bid') else 'buy' side = None return { 'info': trade, 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'symbol': symbol, 'id': trade['id'], 'order': None, 'type': None, 'side': side, 'price': price, 'amount': amount, 'cost': cost, 'fee': None, } def fetch_trades(self, symbol, since=None, limit=50, params={}): self.load_markets() market = self.market(symbol) response = self.publicGetMarketTradesTradingPairId(self.extend({ 'trading_pair_id': market['id'], 'limit': limit, # default 20, but that seems too little }, params)) trades = response['result']['trades'] return self.parse_trades(trades, market, since, limit) def parse_ohlcv(self, ohlcv, market=None, timeframe='5m', since=None, limit=None): return [ # they say that timestamps are Unix Timestamps in seconds, but in fact those are milliseconds ohlcv['timestamp'], float(ohlcv['open']), float(ohlcv['high']), float(ohlcv['low']), float(ohlcv['close']), float(ohlcv['volume']), ] def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) # # they say in their docs that end_time defaults to current server time # but if you don't specify it, their range limits does not allow you to query anything endTime = self.milliseconds() request = { 'trading_pair_id': market['id'], 'timeframe': self.timeframes[timeframe], 'end_time': endTime, } if since is not None: request['start_time'] = since response = self.publicGetChartCandlesTradingPairId(self.extend(request, params)) ohlcv = response['result']['candles'] return self.parse_ohlcvs(ohlcv, market, timeframe, since, limit) def fetch_balance(self, params={}): self.load_markets() response = self.privateGetWalletBalances(params) result = {'info': response} balances = response['result']['balances'] for i in range(0, len(balances)): balance = balances[i] currency = balance['currency'] if currency in self.currencies_by_id: currency = self.currencies_by_id[currency]['code'] account = { 'used': float(balance['on_order']), 'total': float(balance['total']), } account['free'] = float(account['total'] - account['used']) result[currency] = account return self.parse_balance(result) def parse_order_status(self, status): statuses = { 'filled': 'closed', 'rejected': 
'closed', 'partially_filled': 'open', 'pending_cancellation': 'open', 'pending_modification': 'open', 'open': 'open', 'new': 'open', 'queued': 'open', 'cancelled': 'canceled', 'triggered': 'triggered', } if status in statuses: return statuses[status] return status def parse_order(self, order, market=None): symbol = None if market is None: marketId = self.safe_string_2(order, 'trading_pair', 'trading_pair_id') market = self.safe_value(self.markets_by_id, marketId) if market is not None: symbol = market['symbol'] timestamp = self.safe_integer(order, 'timestamp') price = self.safe_float(order, 'price') average = self.safe_float(order, 'eq_price') amount = self.safe_float(order, 'size') filled = self.safe_float(order, 'filled') remaining = None cost = None if filled is not None and average is not None: cost = average * filled elif average is not None: cost = average * amount if amount is not None: if filled is not None: remaining = amount - filled status = self.parse_order_status(self.safe_string(order, 'state')) side = self.safe_string(order, 'side') if side == 'bid': side = 'buy' elif side == 'ask': side = 'sell' return { 'id': self.safe_string(order, 'id'), 'datetime': self.iso8601(timestamp), 'timestamp': timestamp, 'lastTradeTimestamp': None, 'status': status, 'symbol': symbol, 'type': self.safe_string(order, 'type'), 'side': side, 'price': price, 'cost': cost, 'average': average, 'amount': amount, 'filled': filled, 'remaining': remaining, 'trades': None, 'fee': None, 'info': order, } def create_order(self, symbol, type, side, amount, price=None, params={}): self.load_markets() market = self.market(symbol) side = 'ask' if (side == 'sell') else 'bid' request = { 'trading_pair_id': market['id'], 'type': type, 'side': side, 'size': self.amount_to_precision(symbol, amount), } if type != 'market': request['price'] = self.price_to_precision(symbol, price) response = self.privatePostTradingOrders(self.extend(request, params)) order = self.parse_order(response['result']['order'], market) id = order['id'] self.orders[id] = order return order def edit_order(self, id, symbol, type, side, amount, price, params={}): self.load_markets() response = self.privatePutTradingOrdersOrderId(self.extend({ 'order_id': id, 'price': self.price_to_precision(symbol, price), 'size': self.amount_to_precision(symbol, amount), }, params)) return self.parse_order(self.extend(response, { 'id': id, })) def cancel_order(self, id, symbol=None, params={}): self.load_markets() response = self.privateDeleteTradingOrdersOrderId(self.extend({ 'order_id': id, }, params)) return self.parse_order(self.extend(response, { 'id': id, })) def fetch_order(self, id, symbol=None, params={}): self.load_markets() response = self.privateGetTradingOrdersOrderId(self.extend({ 'order_id': str(id), }, params)) return self.parse_order(response['result']['order']) def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() result = self.privateGetTradingOrders(params) orders = self.parse_orders(result['result']['orders'], None, since, limit) if symbol is not None: return self.filter_by_symbol_since_limit(orders, symbol, since, limit) return self.filter_by_since_limit(orders, since, limit) def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}): self.load_markets() result = self.privateGetTradingOrderHistory(params) orders = self.parse_orders(result['result']['orders'], None, since, limit) if symbol is not None: return self.filter_by_symbol_since_limit(orders, symbol, since, limit) return 
self.filter_by_since_limit(orders, since, limit) def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}): self.load_markets() response = self.privateGetTradingOrdersOrderIdTrades(self.extend({ 'order_id': id, }, params)) market = None if (symbol is None) else self.market(symbol) return self.parse_trades(response['result']['trades'], market) def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}): self.load_markets() market = self.market(symbol) request = {} if symbol is not None: request['trading_pair_id'] = market['id'] response = self.privateGetTradingTrades(self.extend(request, params)) return self.parse_trades(response['result']['trades'], market, since, limit) def create_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) ledgerType = self.safe_string(params, 'ledger_type', 'exchange') request = { 'currency': currency['id'], 'ledger_type': ledgerType, } response = self.privatePostWalletDepositAddresses(self.extend(request, params)) address = self.safe_string(response['result']['deposit_address'], 'address') tag = self.safe_string(response['result']['deposit_address'], 'memo') self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'info': response, } def fetch_deposit_address(self, code, params={}): self.load_markets() currency = self.currency(code) response = self.privateGetWalletDepositAddresses(self.extend({ 'currency': currency['id'], }, params)) addresses = self.safe_value(response['result'], 'deposit_addresses', []) address = None tag = None if len(addresses) > 0: address = self.safe_string(addresses[0], 'address') tag = self.safe_string_2(addresses[0], 'memo', 'tag') self.check_address(address) return { 'currency': code, 'address': address, 'tag': tag, 'info': response, } def withdraw(self, code, amount, address, tag=None, params={}): self.load_markets() currency = self.currency(code) request = { 'currency': currency['id'], 'amount': amount, 'address': address, } if tag is not None: request['memo'] = tag response = self.privatePostWalletWithdrawals(self.extend(request, params)) return { 'id': None, 'info': response, } def fetch_deposits(self, code=None, since=None, limit=None, params={}): self.load_markets() if code is None: raise ExchangeError(self.id + ' fetchDeposits() requires a currency code arguemnt') currency = self.currency(code) request = { 'currency': currency['id'], } response = self.privateGetWalletDeposits(self.extend(request, params)) return self.parseTransactions(response['result']['deposits'], currency) def fetch_withdrawals(self, code=None, since=None, limit=None, params={}): self.load_markets() if code is None: raise ExchangeError(self.id + ' fetchWithdrawals() requires a currency code arguemnt') currency = self.currency(code) request = { 'currency': currency['id'], } response = self.privateGetWalletWithdrawals(self.extend(request, params)) return self.parseTransactions(response['result']['withdrawals'], currency) def parse_transaction_status(self, status): statuses = { 'tx_pending_two_factor_auth': 'pending', 'tx_pending_email_auth': 'pending', 'tx_pending_approval': 'pending', 'tx_approved': 'pending', 'tx_processing': 'pending', 'tx_pending': 'pending', 'tx_sent': 'pending', 'tx_cancelled': 'canceled', 'tx_timeout': 'failed', 'tx_invalid': 'failed', 'tx_rejected': 'failed', 'tx_confirmed': 'ok', } return statuses[status] if (status in list(statuses.keys())) else status def parse_transaction(self, transaction, currency=None): timestamp = 
self.safe_integer(transaction, 'created_at') code = None if currency is None: currencyId = self.safe_string(transaction, 'currency') if currencyId in self.currencies_by_id: currency = self.currencies_by_id[currencyId] else: code = self.common_currency_code(currencyId) if currency is not None: code = currency['code'] id = None withdrawalId = self.safe_string(transaction, 'withdrawal_id') depositId = self.safe_string(transaction, 'deposit_id') type = None address = None if withdrawalId is not None: type = 'withdrawal' id = withdrawalId address = self.safe_string(transaction, 'to_address') elif depositId is not None: type = 'deposit' id = depositId address = self.safe_string(transaction, 'from_address') additionalInfo = self.safe_value(transaction, 'additional_info', {}) tag = self.safe_string(additionalInfo, 'memo') return { 'info': transaction, 'id': id, 'txid': self.safe_string(transaction, 'txhash'), 'timestamp': timestamp, 'datetime': self.iso8601(timestamp), 'address': address, 'tag': tag, 'type': type, 'amount': self.safe_float(transaction, 'amount'), 'currency': code, 'status': self.parse_transaction_status(transaction['status']), 'updated': None, 'fee': { 'cost': self.safe_float(transaction, 'fee'), 'rate': None, }, } def sign(self, path, api='public', method='GET', params={}, headers=None, body=None): url = self.urls['api'] + '/' + self.version + '/' + self.implode_params(path, params) query = self.omit(params, self.extract_params(path)) headers = {} if api == 'private': self.check_required_credentials() headers['nonce'] = str(self.nonce()) headers['Authorization'] = self.apiKey if method == 'GET': query = self.urlencode(query) if len(query): url += '?' + query else: headers['Content-type'] = 'application/json charset=UTF-8' body = self.json(query) return {'url': url, 'method': method, 'body': body, 'headers': headers} def handle_errors(self, code, reason, url, method, headers, body, response): if code < 400 or code >= 600: return if body[0] != '{': raise ExchangeError(self.id + ' ' + body) response = json.loads(body) feedback = self.id + ' ' + self.json(response) errorCode = self.safe_value(response['error'], 'error_code') if method == 'DELETE' or method == 'GET': if errorCode == 'parameter_error': if url.find('trading/orders/') >= 0: raise InvalidOrder(feedback) exceptions = self.exceptions if errorCode in exceptions: raise exceptions[errorCode](feedback) raise ExchangeError(feedback) def nonce(self): return self.milliseconds()
true
true
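The exchange-wrapper source in the record above repeatedly uses one idiom: map venue-specific strings (order states, transaction states, order sides) onto a small canonical vocabulary through a lookup table with a pass-through default. The following is a minimal standalone sketch of that idiom, not the library's actual table; the mapping entries shown are illustrative.

# Sketch of the status-normalization pattern from the record above.
ORDER_STATUSES = {
    'filled': 'closed',
    'partially_filled': 'open',
    'pending_cancellation': 'open',
    'cancelled': 'canceled',
}

def parse_order_status(status):
    # Known states map to a canonical value; unknown states pass through unchanged.
    return ORDER_STATUSES.get(status, status)

if __name__ == '__main__':
    for raw in ('filled', 'partially_filled', 'triggered'):
        print(raw, '->', parse_order_status(raw))

The pass-through default matters: when the venue adds a new state, downstream code still receives a string instead of a KeyError.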
f7012febf5c582c921ff95fa96e32ca022810de8
833
py
Python
backend/model/Corpus.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
null
null
null
backend/model/Corpus.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
null
null
null
backend/model/Corpus.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
1
2019-10-15T15:48:13.000Z
2019-10-15T15:48:13.000Z
import abc

from backend.model.SentenceTokenise import SentenceTokenise
from backend.service.ExtractSentences import extract_sentences
from backend.service.ReadCorpus import read_corpus


class Corpus:
    def __init__(self):
        self.receive_text = ""
        self.input_file = "t1_biology_0_0.txt"
        self.base_train_folder = "../data/source_txt/train/"
        pass

    sentences = SentenceTokenise()

    @abc.abstractmethod
    def getInputText(self):
        # The cleaned corpus
        Corpus.receivedText = read_corpus(self.base_train_folder, self.input_file)
        return Corpus.receivedText

    def getSentences(self, text):
        # The list of sentences
        self.sentences.listOfSentence = extract_sentences(text)
        return self.sentences.listOfSentence

    def setInputText(self, text):
        pass
26.870968
82
0.710684
import abc

from backend.model.SentenceTokenise import SentenceTokenise
from backend.service.ExtractSentences import extract_sentences
from backend.service.ReadCorpus import read_corpus


class Corpus:
    def __init__(self):
        self.receive_text = ""
        self.input_file = "t1_biology_0_0.txt"
        self.base_train_folder = "../data/source_txt/train/"
        pass

    sentences = SentenceTokenise()

    @abc.abstractmethod
    def getInputText(self):
        Corpus.receivedText = read_corpus(self.base_train_folder, self.input_file)
        return Corpus.receivedText

    def getSentences(self, text):
        self.sentences.listOfSentence = extract_sentences(text)
        return self.sentences.listOfSentence

    def setInputText(self, text):
        pass
true
true
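The Corpus class in the record above wires a file reader to a sentence splitter, but its backend.* helpers are not part of this record. The sketch below is a self-contained analogue of that read-then-tokenise flow; the regex-based extract_sentences stand-in and the SimpleCorpus class are assumptions for illustration, not the project's implementation.

import re

# Hypothetical stand-in for the project's extract_sentences helper:
# a naive split on sentence-ending punctuation followed by whitespace.
def extract_sentences(text):
    return [s.strip() for s in re.split(r'(?<=[.!?])\s+', text) if s.strip()]

class SimpleCorpus:
    def __init__(self, text):
        self.text = text

    def get_sentences(self):
        # Mirrors Corpus.getSentences: hand the raw text to the splitter.
        return extract_sentences(self.text)

if __name__ == '__main__':
    corpus = SimpleCorpus("A gene is a unit of heredity. It is made of DNA!")
    print(corpus.get_sentences())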
f701300168d70aab345ae6b11215023d9be8143b
18,474
py
Python
release_test/_version.py
psyplot/ci-release-test
cc21e2749fa39a9b810abb47ecfbc2062c1f5e70
[ "MIT" ]
null
null
null
release_test/_version.py
psyplot/ci-release-test
cc21e2749fa39a9b810abb47ecfbc2062c1f5e70
[ "MIT" ]
null
null
null
release_test/_version.py
psyplot/ci-release-test
cc21e2749fa39a9b810abb47ecfbc2062c1f5e70
[ "MIT" ]
null
null
null
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" git_date = "$Format:%ci$" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "psyplot-ci-release-test-" cfg.versionfile_source = "release_test/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." 
return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None}
35.458733
79
0.584876
import errno import os import re import subprocess import sys def get_keywords(): git_refnames = "$Format:%d$" git_full = "$Format:%H$" git_date = "$Format:%ci$" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: def get_config(): cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "v" cfg.parentdir_prefix = "psyplot-ci-release-test-" cfg.versionfile_source = "release_test/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # -like" string, which we must then edit to make compliant), because # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. 
TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." 
return "+" def render_pep440(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. 
Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None}
true
true
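The _version.py in the record above turns git-describe output ("pieces") into PEP 440 version strings. The sketch below replicates only the default pep440 branch and runs it on made-up pieces dicts to show the TAG[+DISTANCE.gHEX[.dirty]] scheme; the example hashes and tags are invented.

# Sketch of the pep440 rendering rule from the record above, on made-up inputs.
def render_pep440(pieces):
    if pieces["closest-tag"]:
        rendered = pieces["closest-tag"]
        if pieces["distance"] or pieces["dirty"]:
            # plus_or_dot: use "." if the tag already carries a local segment.
            rendered += "." if "+" in pieces["closest-tag"] else "+"
            rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
            if pieces["dirty"]:
                rendered += ".dirty"
    else:
        rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"])
        if pieces["dirty"]:
            rendered += ".dirty"
    return rendered

if __name__ == '__main__':
    print(render_pep440({"closest-tag": "1.2.3", "distance": 0, "dirty": False, "short": "abc1234"}))  # 1.2.3
    print(render_pep440({"closest-tag": "1.2.3", "distance": 4, "dirty": True, "short": "abc1234"}))   # 1.2.3+4.gabc1234.dirty
    print(render_pep440({"closest-tag": None, "distance": 7, "dirty": False, "short": "abc1234"}))     # 0+untagged.7.gabc1234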
f701300b81bbc9afd41de7d74fc9c00b50ca96e8
206
py
Python
scripts/6-num.py
bfhq/lists
bb402ed71f1c2b20b5d7640b2dab162891a8d75c
[ "MIT" ]
null
null
null
scripts/6-num.py
bfhq/lists
bb402ed71f1c2b20b5d7640b2dab162891a8d75c
[ "MIT" ]
null
null
null
scripts/6-num.py
bfhq/lists
bb402ed71f1c2b20b5d7640b2dab162891a8d75c
[ "MIT" ]
null
null
null
import sys  # required before reassigning sys.stdout below

sys.stdout = open("6-num.txt", "w")
data = "1234567890"

for a in data:
    for b in data:
        for c in data:
            for d in data:
                for e in data:
                    for f in data:
                        print(a+b+c+d+e+f)

sys.stdout.close()
18.727273
35
0.582524
import sys

sys.stdout = open("6-num.txt", "w")
data = "1234567890"

for a in data:
    for b in data:
        for c in data:
            for d in data:
                for e in data:
                    for f in data:
                        print(a+b+c+d+e+f)

sys.stdout.close()
true
true
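The script in the record above enumerates every 6-character string over "1234567890" with six nested loops. The same enumeration can be expressed with itertools.product, which removes the nesting and scales to any length via the repeat argument. A sketch follows; the output file name here is illustrative, not the script's.

from itertools import product

# Equivalent of the six nested loops above, one combination per line.
with open("6-num-demo.txt", "w") as out:
    for combo in product("1234567890", repeat=6):
        out.write("".join(combo) + "\n")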
f7013157fcb57127232d1789c4abece01a2d173b
299
py
Python
digitaloxford/settings/__init__.py
digitaloxford/do-wagtail
49dd75b95109ebb38bf66aca13d3fdeb8e25d319
[ "MIT" ]
2
2021-04-11T11:59:51.000Z
2021-04-12T06:56:23.000Z
digitaloxford/settings/__init__.py
digitaloxford/do-wagtail
49dd75b95109ebb38bf66aca13d3fdeb8e25d319
[ "MIT" ]
8
2021-04-10T10:40:27.000Z
2022-01-25T16:32:22.000Z
digitaloxford/settings/__init__.py
digitaloxford/do-wagtail
49dd75b95109ebb38bf66aca13d3fdeb8e25d319
[ "MIT" ]
null
null
null
import os

from dotenv import load_dotenv

# Load environment variables
load_dotenv()

# Set hosting environment, if not set, default to production for security
HOSTING_ENV = os.getenv("HOSTING_ENV", "production")

if HOSTING_ENV == "dev":
    from .dev import *
else:
    from .production import *
19.933333
73
0.742475
import os

from dotenv import load_dotenv

load_dotenv()

HOSTING_ENV = os.getenv("HOSTING_ENV", "production")

if HOSTING_ENV == "dev":
    from .dev import *
else:
    from .production import *
true
true
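The settings package in the record above chooses between dev and production modules based on a HOSTING_ENV variable, defaulting to production when the variable is unset. The sketch below shows the same switch using only the standard library; the placeholder settings dicts and their keys are assumptions for illustration, not the project's configuration.

import os

# Environment-driven config selection, defaulting to the safer option.
DEV = {"DEBUG": True, "ALLOWED_HOSTS": ["localhost"]}
PRODUCTION = {"DEBUG": False, "ALLOWED_HOSTS": ["example.org"]}

HOSTING_ENV = os.getenv("HOSTING_ENV", "production")
settings = DEV if HOSTING_ENV == "dev" else PRODUCTION

if __name__ == '__main__':
    print(HOSTING_ENV, settings)

Defaulting to production when the variable is missing is the deliberate design choice here: a misconfigured deployment falls back to the locked-down settings rather than the permissive ones.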
f70131b5a4b1638a0ebec960b0e6d3ac3970019e
88,180
py
Python
great_expectations/data_context/types/base.py
annuupadhyayPS/great_expectations
3b3833881bfc5dd6aada9396dba4e08d097dabc7
[ "Apache-2.0" ]
null
null
null
great_expectations/data_context/types/base.py
annuupadhyayPS/great_expectations
3b3833881bfc5dd6aada9396dba4e08d097dabc7
[ "Apache-2.0" ]
null
null
null
great_expectations/data_context/types/base.py
annuupadhyayPS/great_expectations
3b3833881bfc5dd6aada9396dba4e08d097dabc7
[ "Apache-2.0" ]
null
null
null
import abc import enum import itertools import logging import uuid from copy import deepcopy from typing import Any, Dict, List, MutableMapping, Optional, Union from ruamel.yaml import YAML from ruamel.yaml.comments import CommentedMap from ruamel.yaml.compat import StringIO import great_expectations.exceptions as ge_exceptions from great_expectations.core.util import convert_to_json_serializable, nested_update from great_expectations.marshmallow__shade import ( INCLUDE, Schema, ValidationError, fields, post_dump, post_load, pre_load, validates_schema, ) from great_expectations.marshmallow__shade.validate import OneOf from great_expectations.types import DictDot, SerializableDictDot from great_expectations.types.configurations import ClassConfigSchema yaml = YAML() yaml.indent(mapping=2, sequence=4, offset=2) logger = logging.getLogger(__name__) CURRENT_GE_CONFIG_VERSION = 3 FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE = 3 CURRENT_CHECKPOINT_CONFIG_VERSION = 1 MINIMUM_SUPPORTED_CONFIG_VERSION = 2 DEFAULT_USAGE_STATISTICS_URL = ( "https://stats.greatexpectations.io/great_expectations/v1/usage_statistics" ) def object_to_yaml_str(obj): output_str: str with StringIO() as string_stream: yaml.dump(obj, string_stream) output_str = string_stream.getvalue() return output_str class BaseYamlConfig(SerializableDictDot): _config_schema_class = None def __init__(self, commented_map: CommentedMap = None): if commented_map is None: commented_map = CommentedMap() self._commented_map = commented_map @classmethod def _get_schema_instance(cls) -> Schema: if not issubclass(cls.get_schema_class(), Schema): raise ge_exceptions.InvalidConfigError( "Invalid type: A configuration schema class needs to inherit from the Marshmallow Schema class." ) if not issubclass(cls.get_config_class(), BaseYamlConfig): raise ge_exceptions.InvalidConfigError( "Invalid type: A configuration class needs to inherit from the BaseYamlConfig class." ) if hasattr(cls.get_config_class(), "_schema_instance"): # noinspection PyProtectedMember schema_instance: Schema = cls.get_config_class()._schema_instance if schema_instance is None: cls.get_config_class()._schema_instance = (cls.get_schema_class())() else: return schema_instance else: cls.get_config_class().schema_instance = (cls.get_schema_class())() return cls.get_config_class().schema_instance @classmethod def from_commented_map(cls, commented_map: CommentedMap): try: config: Union[dict, BaseYamlConfig] config = cls._get_schema_instance().load(commented_map) if isinstance(config, dict): return cls.get_config_class()(commented_map=commented_map, **config) return config except ValidationError: logger.error( "Encountered errors during loading config. See ValidationError for more details." 
) raise def _get_schema_validated_updated_commented_map(self) -> CommentedMap: commented_map: CommentedMap = deepcopy(self._commented_map) commented_map.update(self._get_schema_instance().dump(self)) return commented_map def to_yaml(self, outfile): """ :returns None (but writes a YAML file containing the project configuration) """ yaml.dump(self.commented_map, outfile) def to_yaml_str(self) -> str: """ :returns a YAML string containing the project configuration """ return object_to_yaml_str(self.commented_map) def to_json_dict(self) -> dict: """ :returns a JSON-serialiable dict containing the project configuration """ commented_map: CommentedMap = self.commented_map return convert_to_json_serializable(data=commented_map) @property def commented_map(self) -> CommentedMap: return self._get_schema_validated_updated_commented_map() @classmethod def get_config_class(cls): raise NotImplementedError @classmethod def get_schema_class(cls): raise NotImplementedError class AssetConfig(DictDot): def __init__( self, name=None, class_name=None, module_name=None, bucket=None, prefix=None, delimiter=None, max_keys=None, batch_spec_passthrough=None, **kwargs, ): if name is not None: self.name = name self._class_name = class_name self._module_name = module_name if bucket is not None: self.bucket = bucket if prefix is not None: self.prefix = prefix if delimiter is not None: self.delimiter = delimiter if max_keys is not None: self.max_keys = max_keys if batch_spec_passthrough is not None: self.batch_spec_passthrough = batch_spec_passthrough for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class AssetConfigSchema(Schema): class Meta: unknown = INCLUDE name = fields.String(required=False, allow_none=True) class_name = fields.String(required=False, allow_none=True, missing="Asset") module_name = fields.String( required=False, all_none=True, missing="great_expectations.datasource.data_connector.asset", ) base_directory = fields.String(required=False, allow_none=True) glob_directive = fields.String(required=False, allow_none=True) pattern = fields.String(required=False, allow_none=True) group_names = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) bucket = fields.String(required=False, allow_none=True) prefix = fields.String(required=False, allow_none=True) delimiter = fields.String(required=False, allow_none=True) max_keys = fields.Integer(required=False, allow_none=True) batch_spec_passthrough = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): pass # noinspection PyUnusedLocal @post_load def make_asset_config(self, data, **kwargs): return AssetConfig(**data) class SorterConfig(DictDot): def __init__( self, name, class_name=None, module_name=None, orderby="asc", reference_list=None, datetime_format=None, **kwargs, ): self._name = name self._class_name = class_name self._module_name = module_name self._orderby = orderby for k, v in kwargs.items(): setattr(self, k, v) if reference_list is not None: self._reference_list = reference_list if datetime_format is not None: self._datetime_format = datetime_format @property def name(self): return self._name @property def module_name(self): return self._module_name @property def class_name(self): return self._class_name @property def orderby(self): return self._orderby @property def reference_list(self): return self._reference_list @property def datetime_format(self): return 
self._datetime_format class SorterConfigSchema(Schema): class Meta: unknown = INCLUDE name = fields.String(required=True) class_name = fields.String(required=True) module_name = fields.String( missing="great_expectations.datasource.data_connector.sorter" ) orderby = fields.String(required=False, missing="asc", allow_none=False) # allow_none = True because it is only used by some Sorters reference_list = fields.List( cls_or_instance=fields.Str(), required=False, missing=None, allow_none=True ) datetime_format = fields.String(required=False, missing=None, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): pass # noinspection PyUnusedLocal @post_load def make_sorter_config(self, data, **kwargs): return SorterConfig(**data) class DataConnectorConfig(DictDot): def __init__( self, class_name, module_name=None, credentials=None, assets=None, base_directory=None, glob_directive=None, default_regex=None, batch_identifiers=None, sorters=None, batch_spec_passthrough=None, # S3 boto3_options=None, bucket=None, max_keys=None, # Azure azure_options=None, container=None, name_starts_with=None, # GCS bucket_or_name=None, max_results=None, # Both S3/GCS prefix=None, # Both S3/Azure delimiter=None, **kwargs, ): self._class_name = class_name self._module_name = module_name if credentials is not None: self.credentials = credentials if assets is not None: self.assets = assets if base_directory is not None: self.base_directory = base_directory if glob_directive is not None: self.glob_directive = glob_directive if default_regex is not None: self.default_regex = default_regex if batch_identifiers is not None: self.batch_identifiers = batch_identifiers if sorters is not None: self.sorters = sorters if batch_spec_passthrough is not None: self.batch_spec_passthrough = batch_spec_passthrough # S3 if boto3_options is not None: self.boto3_options = boto3_options if bucket is not None: self.bucket = bucket if max_keys is not None: self.max_keys = max_keys # Azure if azure_options is not None: self.azure_options = azure_options if container is not None: self.container = container if name_starts_with is not None: self.name_starts_with = name_starts_with # GCS if bucket_or_name is not None: self.bucket_or_name = bucket_or_name if max_results is not None: self.max_results = max_results # Both S3/GCS if prefix is not None: self.prefix = prefix # Both S3/Azure if delimiter is not None: self.delimiter = delimiter for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class DataConnectorConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(required=True) module_name = fields.String(missing="great_expectations.datasource.data_connector") assets = fields.Dict( keys=fields.Str(), values=fields.Nested(AssetConfigSchema, required=False, allow_none=True), required=False, allow_none=True, ) base_directory = fields.String(required=False, allow_none=True) glob_directive = fields.String(required=False, allow_none=True) sorters = fields.List( fields.Nested(SorterConfigSchema, required=False, allow_none=True), required=False, allow_none=True, ) default_regex = fields.Dict(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) batch_identifiers = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) # S3 boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) bucket = 
fields.String(required=False, allow_none=True) max_keys = fields.Integer(required=False, allow_none=True) # Azure azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) container = fields.String(required=False, allow_none=True) name_starts_with = fields.String(required=False, allow_none=True) # GCS gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) bucket_or_name = fields.String(required=False, allow_none=True) max_results = fields.String(required=False, allow_none=True) # Both S3/GCS prefix = fields.String(required=False, allow_none=True) # Both S3/Azure delimiter = fields.String(required=False, allow_none=True) data_asset_name_prefix = fields.String(required=False, allow_none=True) data_asset_name_suffix = fields.String(required=False, allow_none=True) include_schema_name = fields.Boolean(required=False, allow_none=True) splitter_method = fields.String(required=False, allow_none=True) splitter_kwargs = fields.Dict(required=False, allow_none=True) sampling_method = fields.String(required=False, allow_none=True) sampling_kwargs = fields.Dict(required=False, allow_none=True) excluded_tables = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) included_tables = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) skip_inapplicable_tables = fields.Boolean(required=False, allow_none=True) batch_spec_passthrough = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): # If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted. if data["class_name"][0] == "$": return if ("default_regex" in data) and not ( data["class_name"] in [ "InferredAssetFilesystemDataConnector", "ConfiguredAssetFilesystemDataConnector", "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a subclass of the FilePathDataConnector class (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("glob_directive" in data) and not ( data["class_name"] in [ "InferredAssetFilesystemDataConnector", "ConfiguredAssetFilesystemDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a filesystem type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("delimiter" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3/Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. 
""" ) if ("prefix" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3/GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("bucket" in data or "max_keys" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3 type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ( "azure_options" in data or "container" in data or "name_starts_with" in data ) and not ( data["class_name"] in [ "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if "azure_options" in data and data["class_name"] in [ "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ]: azure_options = data["azure_options"] if not (("conn_str" in azure_options) ^ ("account_url" in azure_options)): raise ge_exceptions.InvalidConfigError( f"""Your current configuration is either missing methods of authentication or is using too many for the Azure type of data connector. You must only select one between `conn_str` or `account_url`. Please update your configuration to continue. """ ) if ( "gcs_options" in data or "bucket_or_name" in data or "max_results" in data ) and not ( data["class_name"] in [ "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if "gcs_options" in data and data["class_name"] in [ "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ]: gcs_options = data["gcs_options"] if "filename" in gcs_options and "info" in gcs_options: raise ge_exceptions.InvalidConfigError( f"""Your current configuration can only use a single method of authentication for the GCS type of data connector. You must only select one between `filename` (from_service_account_file) and `info` (from_service_account_info). Please update your configuration to continue. """ ) if ( "data_asset_name_prefix" in data or "data_asset_name_suffix" in data or "include_schema_name" in data or "splitter_method" in data or "splitter_kwargs" in data or "sampling_method" in data or "sampling_kwargs" in data or "excluded_tables" in data or "included_tables" in data or "skip_inapplicable_tables" in data ) and not ( data["class_name"] in [ "InferredAssetSqlDataConnector", "ConfiguredAssetSqlDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an SQL type of the data connector (your data connector is "{data['class_name']}"). 
Please update your configuration to continue. """ ) # noinspection PyUnusedLocal @post_load def make_data_connector_config(self, data, **kwargs): return DataConnectorConfig(**data) class ExecutionEngineConfig(DictDot): def __init__( self, class_name, module_name=None, caching=None, batch_spec_defaults=None, connection_string=None, credentials=None, spark_config=None, boto3_options=None, azure_options=None, gcs_options=None, **kwargs, ): self._class_name = class_name self._module_name = module_name if caching is not None: self.caching = caching if batch_spec_defaults is not None: self._batch_spec_defaults = batch_spec_defaults if connection_string is not None: self.connection_string = connection_string if credentials is not None: self.credentials = credentials if spark_config is not None: self.spark_config = spark_config if boto3_options is not None: self.boto3_options = boto3_options if azure_options is not None: self.azure_options = azure_options if gcs_options is not None: self.gcs_options = gcs_options for k, v in kwargs.items(): setattr(self, k, v) @property def module_name(self): return self._module_name @property def class_name(self): return self._class_name @property def batch_spec_defaults(self): return self._batch_spec_defaults class ExecutionEngineConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(required=True) module_name = fields.String(missing="great_expectations.execution_engine") connection_string = fields.String(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) spark_config = fields.Raw(required=False, allow_none=True) boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) caching = fields.Boolean(required=False, allow_none=True) batch_spec_defaults = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): # If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted. if data["class_name"][0] == "$": return if ("connection_string" in data or "credentials" in data) and not ( data["class_name"] == "SqlAlchemyExecutionEngine" ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses the "connection_string" key in an execution engine, but only SqlAlchemyExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your configuration to continue. """ ) if "spark_config" in data and not ( data["class_name"] == "SparkDFExecutionEngine" ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses the "spark_config" key in an execution engine, but only SparkDFExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your configuration to continue. 
""" ) # noinspection PyUnusedLocal @post_load def make_execution_engine_config(self, data, **kwargs): return ExecutionEngineConfig(**data) class DatasourceConfig(DictDot): def __init__( self, class_name=None, module_name: Optional[str] = "great_expectations.datasource", execution_engine=None, data_connectors=None, data_asset_type=None, batch_kwargs_generators=None, connection_string=None, credentials=None, introspection=None, tables=None, boto3_options=None, azure_options=None, gcs_options=None, reader_method=None, reader_options=None, limit=None, **kwargs, ): # NOTE - JPC - 20200316: Currently, we are mostly inconsistent with respect to this type... self._class_name = class_name self._module_name = module_name if execution_engine is not None: self.execution_engine = execution_engine if data_connectors is not None and isinstance(data_connectors, dict): self.data_connectors = data_connectors # NOTE - AJB - 20201202: This should use the datasource class build_configuration method as in DataContext.add_datasource() if data_asset_type is None: if class_name == "PandasDatasource": data_asset_type = { "class_name": "PandasDataset", "module_name": "great_expectations.dataset", } elif class_name == "SqlAlchemyDatasource": data_asset_type = { "class_name": "SqlAlchemyDataset", "module_name": "great_expectations.dataset", } elif class_name == "SparkDFDatasource": data_asset_type = { "class_name": "SparkDFDataset", "module_name": "great_expectations.dataset", } if data_asset_type is not None: self.data_asset_type = data_asset_type if batch_kwargs_generators is not None: self.batch_kwargs_generators = batch_kwargs_generators if connection_string is not None: self.connection_string = connection_string if credentials is not None: self.credentials = credentials if introspection is not None: self.introspection = introspection if tables is not None: self.tables = tables if boto3_options is not None: self.boto3_options = boto3_options if azure_options is not None: self.azure_options = azure_options if gcs_options is not None: self.gcs_options = gcs_options if reader_method is not None: self.reader_method = reader_method if reader_options is not None: self.reader_options = reader_options if limit is not None: self.limit = limit for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class DatasourceConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(missing="Datasource") module_name = fields.String(missing="great_expectations.datasource") force_reuse_spark_context = fields.Bool(required=False, allow_none=True) spark_config = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) execution_engine = fields.Nested( ExecutionEngineConfigSchema, required=False, allow_none=True ) data_connectors = fields.Dict( keys=fields.Str(), values=fields.Nested(DataConnectorConfigSchema), required=False, allow_none=True, ) data_asset_type = fields.Nested(ClassConfigSchema, required=False, allow_none=True) # TODO: Update to generator-specific # batch_kwargs_generators = fields.Mapping(keys=fields.Str(), values=fields.Nested(fields.GeneratorSchema)) batch_kwargs_generators = fields.Dict( keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True ) connection_string = fields.String(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) introspection = fields.Dict(required=False, allow_none=True) tables = 
fields.Dict(required=False, allow_none=True) boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) reader_method = fields.String(required=False, allow_none=True) reader_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) limit = fields.Integer(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if "generators" in data: raise ge_exceptions.InvalidConfigError( 'Your current configuration uses the "generators" key in a datasource, but in version 0.10 of ' 'GE that key is renamed to "batch_kwargs_generators". Please update your configuration to continue.' ) # If a class_name begins with the dollar sign ("$"), then it is assumed to be a variable name to be substituted. if data["class_name"][0] == "$": return if ( "connection_string" in data or "credentials" in data or "introspection" in data or "tables" in data ) and not ( data["class_name"] in [ "SqlAlchemyDatasource", "SimpleSqlalchemyDatasource", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data source that are required only by a sqlalchemy data source (your data source is "{data['class_name']}"). Please update your configuration to continue. """ ) # noinspection PyUnusedLocal @post_load def make_datasource_config(self, data, **kwargs): return DatasourceConfig(**data) class AnonymizedUsageStatisticsConfig(DictDot): def __init__(self, enabled=True, data_context_id=None, usage_statistics_url=None): self._enabled = enabled if data_context_id is None: data_context_id = str(uuid.uuid4()) self._explicit_id = False else: self._explicit_id = True self._data_context_id = data_context_id if usage_statistics_url is None: usage_statistics_url = DEFAULT_USAGE_STATISTICS_URL self._explicit_url = False else: self._explicit_url = True self._usage_statistics_url = usage_statistics_url @property def enabled(self): return self._enabled @enabled.setter def enabled(self, enabled): if not isinstance(enabled, bool): raise ValueError("usage statistics enabled property must be boolean") self._enabled = enabled @property def data_context_id(self): return self._data_context_id @data_context_id.setter def data_context_id(self, data_context_id): try: uuid.UUID(data_context_id) except ValueError: raise ge_exceptions.InvalidConfigError( "data_context_id must be a valid uuid" ) self._data_context_id = data_context_id self._explicit_id = True @property def explicit_id(self): return self._explicit_id @property def usage_statistics_url(self): return self._usage_statistics_url @usage_statistics_url.setter def usage_statistics_url(self, usage_statistics_url): self._usage_statistics_url = usage_statistics_url self._explicit_url = True class AnonymizedUsageStatisticsConfigSchema(Schema): data_context_id = fields.UUID() enabled = fields.Boolean(default=True) usage_statistics_url = fields.URL(allow_none=True) _explicit_url = fields.Boolean(required=False) # noinspection PyUnusedLocal @post_load() def make_usage_statistics_config(self, data, **kwargs): if "data_context_id" in data: data["data_context_id"] = str(data["data_context_id"]) return AnonymizedUsageStatisticsConfig(**data) # noinspection PyUnusedLocal @post_dump() def filter_implicit(self, data, **kwargs): if not 
data.get("_explicit_url") and "usage_statistics_url" in data: del data["usage_statistics_url"] if "_explicit_url" in data: del data["_explicit_url"] return data class NotebookTemplateConfig(DictDot): def __init__(self, file_name, template_kwargs=None): self.file_name = file_name if template_kwargs: self.template_kwargs = template_kwargs else: self.template_kwargs = {} class NotebookTemplateConfigSchema(Schema): file_name = fields.String() template_kwargs = fields.Dict( keys=fields.Str(), values=fields.Str(), allow_none=True ) # noinspection PyUnusedLocal @post_load def make_notebook_template_config(self, data, **kwargs): return NotebookTemplateConfig(**data) class NotebookConfig(DictDot): def __init__( self, class_name, module_name, custom_templates_module, header_markdown=None, footer_markdown=None, table_expectations_header_markdown=None, column_expectations_header_markdown=None, table_expectations_not_found_markdown=None, column_expectations_not_found_markdown=None, authoring_intro_markdown=None, column_expectations_markdown=None, header_code=None, footer_code=None, table_expectation_code=None, column_expectation_code=None, ): self.class_name = class_name self.module_name = module_name self.custom_templates_module = custom_templates_module self.header_markdown = header_markdown self.footer_markdown = footer_markdown self.table_expectations_header_markdown = table_expectations_header_markdown self.column_expectations_header_markdown = column_expectations_header_markdown self.table_expectations_not_found_markdown = ( table_expectations_not_found_markdown ) self.column_expectations_not_found_markdown = ( column_expectations_not_found_markdown ) self.authoring_intro_markdown = authoring_intro_markdown self.column_expectations_markdown = column_expectations_markdown self.header_code = header_code self.footer_code = footer_code self.table_expectation_code = table_expectation_code self.column_expectation_code = column_expectation_code class NotebookConfigSchema(Schema): class_name = fields.String(missing="SuiteEditNotebookRenderer") module_name = fields.String( missing="great_expectations.render.renderer.suite_edit_notebook_renderer" ) custom_templates_module = fields.String() header_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) footer_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) table_expectations_header_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_header_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) table_expectations_not_found_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_not_found_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) authoring_intro_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) header_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) footer_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) table_expectation_code = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectation_code = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) # noinspection PyUnusedLocal @post_load def make_notebook_config(self, data, **kwargs): return NotebookConfig(**data) class NotebooksConfig(DictDot): def __init__(self, suite_edit): self.suite_edit = suite_edit class NotebooksConfigSchema(Schema): # for now only 
suite_edit, could have other customization options for # notebooks in the future suite_edit = fields.Nested(NotebookConfigSchema) # noinspection PyUnusedLocal @post_load def make_notebooks_config(self, data, **kwargs): return NotebooksConfig(**data) class ConcurrencyConfig(DictDot): """WARNING: This class is experimental.""" def __init__(self, enabled: Optional[bool] = False): """Initialize a concurrency configuration to control multithreaded execution. Args: enabled: Whether or not multithreading is enabled. """ self._enabled = enabled @property def enabled(self): """Whether or not multithreading is enabled.""" return self._enabled @property def max_database_query_concurrency(self) -> int: """Max number of concurrent database queries to execute with mulithreading.""" # BigQuery has a limit of 100 for "Concurrent rate limit for interactive queries" as described at # https://cloud.google.com/bigquery/quotas#query_jobs). If necessary, this can later be tuned for other # databases and/or be manually user configurable. return 100 def add_sqlalchemy_create_engine_parameters( self, parameters: MutableMapping[str, Any] ): """Update SqlAlchemy parameters to prevent concurrency errors (e.g. http://sqlalche.me/e/14/3o7r) and bottlenecks. Args: parameters: SqlAlchemy create_engine parameters to which we add concurrency appropriate parameters. If the concurrency parameters are already set, those parameters are left unchanged. """ if not self._enabled: return if "pool_size" not in parameters: # https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.pool_size parameters["pool_size"] = 0 if "max_overflow" not in parameters: # https://docs.sqlalchemy.org/en/14/core/engines.html#sqlalchemy.create_engine.params.max_overflow parameters["max_overflow"] = -1 class ConcurrencyConfigSchema(Schema): """WARNING: This class is experimental.""" enabled = fields.Boolean(default=False) class GeCloudConfig(DictDot): def __init__(self, base_url: str, account_id: str, access_token: str): self.base_url = base_url self.account_id = account_id self.access_token = access_token def to_json_dict(self): return { "base_url": self.base_url, "account_id": self.account_id, "access_token": self.access_token, } class DataContextConfigSchema(Schema): config_version = fields.Number( validate=lambda x: 0 < x < 100, error_messages={"invalid": "config version must " "be a number."}, ) datasources = fields.Dict( keys=fields.Str(), values=fields.Nested(DatasourceConfigSchema), required=False, allow_none=True, ) expectations_store_name = fields.Str() validations_store_name = fields.Str() evaluation_parameter_store_name = fields.Str() checkpoint_store_name = fields.Str(required=False, allow_none=True) plugins_directory = fields.Str(allow_none=True) validation_operators = fields.Dict( keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True ) stores = fields.Dict(keys=fields.Str(), values=fields.Dict()) notebooks = fields.Nested(NotebooksConfigSchema, allow_none=True) data_docs_sites = fields.Dict( keys=fields.Str(), values=fields.Dict(), allow_none=True ) config_variables_file_path = fields.Str(allow_none=True) anonymous_usage_statistics = fields.Nested(AnonymizedUsageStatisticsConfigSchema) concurrency = fields.Nested(ConcurrencyConfigSchema) # noinspection PyMethodMayBeStatic # noinspection PyUnusedLocal def handle_error(self, exc, data, **kwargs): """Log and raise our custom exception when (de)serialization fails.""" if ( exc and exc.messages and isinstance(exc.messages, dict) and all([key is 
None for key in exc.messages.keys()]) ): exc.messages = list(itertools.chain.from_iterable(exc.messages.values())) message: str = ( f"Error while processing DataContextConfig: {' '.join(exc.messages)}" ) logger.error(message) raise ge_exceptions.InvalidDataContextConfigError( message=message, ) @validates_schema def validate_schema(self, data, **kwargs): if "config_version" not in data: raise ge_exceptions.InvalidDataContextConfigError( "The key `config_version` is missing; please check your config file.", validation_error=ValidationError(message="no config_version key"), ) if not isinstance(data["config_version"], (int, float)): raise ge_exceptions.InvalidDataContextConfigError( "The key `config_version` must be a number. Please check your config file.", validation_error=ValidationError(message="config version not a number"), ) # When migrating from 0.7.x to 0.8.0 if data["config_version"] == 0 and any( [ store_config["class_name"] == "ValidationsStore" for store_config in data["stores"].values() ] ): raise ge_exceptions.UnsupportedConfigVersionError( "You appear to be using a config version from the 0.7.x series. This version is no longer supported." ) if data["config_version"] < MINIMUM_SUPPORTED_CONFIG_VERSION: raise ge_exceptions.UnsupportedConfigVersionError( "You appear to have an invalid config version ({}).\n The version number must be at least {}. " "Please see the migration guide at https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html".format( data["config_version"], MINIMUM_SUPPORTED_CONFIG_VERSION ), ) if data["config_version"] > CURRENT_GE_CONFIG_VERSION: raise ge_exceptions.InvalidDataContextConfigError( "You appear to have an invalid config version ({}).\n The maximum valid version is {}.".format( data["config_version"], CURRENT_GE_CONFIG_VERSION ), validation_error=ValidationError(message="config version too high"), ) if data["config_version"] < CURRENT_GE_CONFIG_VERSION and ( "checkpoint_store_name" in data or any( [ store_config["class_name"] == "CheckpointStore" for store_config in data["stores"].values() ] ) ): raise ge_exceptions.InvalidDataContextConfigError( "You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format( data["config_version"], float(CURRENT_GE_CONFIG_VERSION) ), validation_error=ValidationError( message="You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. 
Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format( data["config_version"], float(CURRENT_GE_CONFIG_VERSION) ) ), ) if ( data["config_version"] >= FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE and "validation_operators" in data and data["validation_operators"] is not None ): # TODO: <Alex>Add a URL to the migration guide with instructions for how to replace validation_operators with appropriate actions.</Alex> logger.warning( "You appear to be using a legacy capability with the latest config version ({}).\n Your data context with this configuration version uses validation_operators, which are being deprecated. Please update your configuration to be compatible with the version number {}.".format( data["config_version"], CURRENT_GE_CONFIG_VERSION ), ) class DataContextConfigDefaults(enum.Enum): DEFAULT_CONFIG_VERSION = CURRENT_GE_CONFIG_VERSION DEFAULT_EXPECTATIONS_STORE_NAME = "expectations_store" EXPECTATIONS_BASE_DIRECTORY = "expectations" DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"{EXPECTATIONS_BASE_DIRECTORY}/" ) DEFAULT_VALIDATIONS_STORE_NAME = "validations_store" VALIDATIONS_BASE_DIRECTORY = "validations" DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"uncommitted/{VALIDATIONS_BASE_DIRECTORY}/" ) DEFAULT_EVALUATION_PARAMETER_STORE_NAME = "evaluation_parameter_store" DEFAULT_EVALUATION_PARAMETER_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( "evaluation_parameters/" ) DEFAULT_CHECKPOINT_STORE_NAME = "checkpoint_store" CHECKPOINTS_BASE_DIRECTORY = "checkpoints" DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"{CHECKPOINTS_BASE_DIRECTORY}/" ) DEFAULT_DATA_DOCS_SITE_NAME = "local_site" DEFAULT_CONFIG_VARIABLES_FILEPATH = "uncommitted/config_variables.yml" PLUGINS_BASE_DIRECTORY = "plugins" DEFAULT_PLUGINS_DIRECTORY = f"{PLUGINS_BASE_DIRECTORY}/" NOTEBOOKS_BASE_DIRECTORY = "notebooks" DEFAULT_VALIDATION_OPERATORS = { "action_list_operator": { "class_name": "ActionListValidationOperator", "action_list": [ { "name": "store_validation_result", "action": {"class_name": "StoreValidationResultAction"}, }, { "name": "store_evaluation_params", "action": {"class_name": "StoreEvaluationParametersAction"}, }, { "name": "update_data_docs", "action": {"class_name": "UpdateDataDocsAction"}, }, ], } } DEFAULT_STORES = { DEFAULT_EXPECTATIONS_STORE_NAME: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, DEFAULT_VALIDATIONS_STORE_NAME: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, DEFAULT_EVALUATION_PARAMETER_STORE_NAME: { "class_name": "EvaluationParameterStore" }, DEFAULT_CHECKPOINT_STORE_NAME: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "suppress_store_backend_id": True, "base_directory": DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, } DEFAULT_DATA_DOCS_SITES = { DEFAULT_DATA_DOCS_SITE_NAME: { "class_name": "SiteBuilder", "show_how_to_buttons": True, "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": "uncommitted/data_docs/local_site/", }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class 
CheckpointConfigDefaults(enum.Enum): DEFAULT_CONFIG_VERSION = CURRENT_CHECKPOINT_CONFIG_VERSION class BaseStoreBackendDefaults(DictDot): """ Define base defaults for platform specific StoreBackendDefaults. StoreBackendDefaults define defaults for specific cases of often used configurations. For example, if you plan to store expectations, validations, and data_docs in s3 use the S3StoreBackendDefaults and you may be able to specify less parameters. """ def __init__( self, expectations_store_name: str = DataContextConfigDefaults.DEFAULT_EXPECTATIONS_STORE_NAME.value, validations_store_name: str = DataContextConfigDefaults.DEFAULT_VALIDATIONS_STORE_NAME.value, evaluation_parameter_store_name: str = DataContextConfigDefaults.DEFAULT_EVALUATION_PARAMETER_STORE_NAME.value, checkpoint_store_name: str = DataContextConfigDefaults.DEFAULT_CHECKPOINT_STORE_NAME.value, data_docs_site_name: str = DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITE_NAME.value, validation_operators: dict = None, stores: dict = None, data_docs_sites: dict = None, ): self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.validation_operators = validation_operators if stores is None: stores = deepcopy(DataContextConfigDefaults.DEFAULT_STORES.value) self.stores = stores if data_docs_sites is None: data_docs_sites = deepcopy( DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITES.value ) self.data_docs_sites = data_docs_sites self.data_docs_site_name = data_docs_site_name class S3StoreBackendDefaults(BaseStoreBackendDefaults): """ Default store configs for s3 backends, with some accessible parameters Args: default_bucket_name: Use this bucket name for stores that do not have a bucket name provided expectations_store_bucket_name: Overrides default_bucket_name if supplied validations_store_bucket_name: Overrides default_bucket_name if supplied data_docs_bucket_name: Overrides default_bucket_name if supplied checkpoint_store_bucket_name: Overrides default_bucket_name if supplied expectations_store_prefix: Overrides default if supplied validations_store_prefix: Overrides default if supplied data_docs_prefix: Overrides default if supplied checkpoint_store_prefix: Overrides default if supplied expectations_store_name: Overrides default if supplied validations_store_name: Overrides default if supplied evaluation_parameter_store_name: Overrides default if supplied checkpoint_store_name: Overrides default if supplied """ def __init__( self, default_bucket_name: Optional[str] = None, expectations_store_bucket_name: Optional[str] = None, validations_store_bucket_name: Optional[str] = None, data_docs_bucket_name: Optional[str] = None, checkpoint_store_bucket_name: Optional[str] = None, expectations_store_prefix: str = "expectations", validations_store_prefix: str = "validations", data_docs_prefix: str = "data_docs", checkpoint_store_prefix: str = "checkpoints", expectations_store_name: str = "expectations_S3_store", validations_store_name: str = "validations_S3_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_S3_store", ): # Initialize base defaults super().__init__() # Use default_bucket_name if separate store buckets are not provided if expectations_store_bucket_name is None: expectations_store_bucket_name = default_bucket_name if validations_store_bucket_name is None: validations_store_bucket_name 
= default_bucket_name if data_docs_bucket_name is None: data_docs_bucket_name = default_bucket_name if checkpoint_store_bucket_name is None: checkpoint_store_bucket_name = default_bucket_name # Overwrite defaults self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": expectations_store_bucket_name, "prefix": expectations_store_prefix, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": validations_store_bucket_name, "prefix": validations_store_prefix, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": checkpoint_store_bucket_name, "prefix": checkpoint_store_prefix, }, }, } self.data_docs_sites = { "s3_site": { "class_name": "SiteBuilder", "show_how_to_buttons": True, "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": data_docs_bucket_name, "prefix": data_docs_prefix, }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class FilesystemStoreBackendDefaults(BaseStoreBackendDefaults): """ Default store configs for filesystem backends, with some accessible parameters Args: root_directory: Absolute directory prepended to the base_directory for each store plugins_directory: Overrides default if supplied """ def __init__( self, root_directory: Optional[str] = None, plugins_directory: Optional[str] = None, ): # Initialize base defaults super().__init__() if plugins_directory is None: plugins_directory = ( DataContextConfigDefaults.DEFAULT_PLUGINS_DIRECTORY.value ) self.plugins_directory = plugins_directory if root_directory is not None: self.stores[self.expectations_store_name]["store_backend"][ "root_directory" ] = root_directory self.stores[self.validations_store_name]["store_backend"][ "root_directory" ] = root_directory self.stores[self.checkpoint_store_name]["store_backend"][ "root_directory" ] = root_directory self.data_docs_sites[self.data_docs_site_name]["store_backend"][ "root_directory" ] = root_directory class InMemoryStoreBackendDefaults(BaseStoreBackendDefaults): """ Default store configs for in memory backends. This is useful for testing without persistence. 
""" def __init__( self, ): # Initialize base defaults super().__init__() self.stores = { self.expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, self.validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, self.evaluation_parameter_store_name: { "class_name": "EvaluationParameterStore" }, self.checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, } self.data_docs_sites = {} class GCSStoreBackendDefaults(BaseStoreBackendDefaults): """ Default store configs for Google Cloud Storage (GCS) backends, with some accessible parameters Args: default_bucket_name: Use this bucket name for stores that do not have a bucket name provided default_project_name: Use this project name for stores that do not have a project name provided expectations_store_bucket_name: Overrides default_bucket_name if supplied validations_store_bucket_name: Overrides default_bucket_name if supplied data_docs_bucket_name: Overrides default_bucket_name if supplied checkpoint_store_bucket_name: Overrides default_bucket_name if supplied expectations_store_project_name: Overrides default_project_name if supplied validations_store_project_name: Overrides default_project_name if supplied data_docs_project_name: Overrides default_project_name if supplied checkpoint_store_project_name: Overrides default_project_name if supplied expectations_store_prefix: Overrides default if supplied validations_store_prefix: Overrides default if supplied data_docs_prefix: Overrides default if supplied checkpoint_store_prefix: Overrides default if supplied expectations_store_name: Overrides default if supplied validations_store_name: Overrides default if supplied evaluation_parameter_store_name: Overrides default if supplied checkpoint_store_name: Overrides default if supplied """ def __init__( self, default_bucket_name: Optional[str] = None, default_project_name: Optional[str] = None, expectations_store_bucket_name: Optional[str] = None, validations_store_bucket_name: Optional[str] = None, data_docs_bucket_name: Optional[str] = None, checkpoint_store_bucket_name: Optional[str] = None, expectations_store_project_name: Optional[str] = None, validations_store_project_name: Optional[str] = None, data_docs_project_name: Optional[str] = None, checkpoint_store_project_name: Optional[str] = None, expectations_store_prefix: str = "expectations", validations_store_prefix: str = "validations", data_docs_prefix: str = "data_docs", checkpoint_store_prefix: str = "checkpoints", expectations_store_name: str = "expectations_GCS_store", validations_store_name: str = "validations_GCS_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_GCS_store", ): # Initialize base defaults super().__init__() # Use default_bucket_name if separate store buckets are not provided if expectations_store_bucket_name is None: expectations_store_bucket_name = default_bucket_name if validations_store_bucket_name is None: validations_store_bucket_name = default_bucket_name if data_docs_bucket_name is None: data_docs_bucket_name = default_bucket_name if checkpoint_store_bucket_name is None: checkpoint_store_bucket_name = default_bucket_name # Use default_project_name if separate store projects are not provided if expectations_store_project_name is None: expectations_store_project_name = default_project_name if 
validations_store_project_name is None: validations_store_project_name = default_project_name if data_docs_project_name is None: data_docs_project_name = default_project_name if checkpoint_store_project_name is None: checkpoint_store_project_name = default_project_name # Overwrite defaults self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": expectations_store_project_name, "bucket": expectations_store_bucket_name, "prefix": expectations_store_prefix, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": validations_store_project_name, "bucket": validations_store_bucket_name, "prefix": validations_store_prefix, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": checkpoint_store_project_name, "bucket": checkpoint_store_bucket_name, "prefix": checkpoint_store_prefix, }, }, } self.data_docs_sites = { "gcs_site": { "class_name": "SiteBuilder", "show_how_to_buttons": True, "store_backend": { "class_name": "TupleGCSStoreBackend", "project": data_docs_project_name, "bucket": data_docs_bucket_name, "prefix": data_docs_prefix, }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class DatabaseStoreBackendDefaults(BaseStoreBackendDefaults): """ Default store configs for database backends, with some accessible parameters Args: default_credentials: Use these credentials for all stores that do not have credentials provided expectations_store_credentials: Overrides default_credentials if supplied validations_store_credentials: Overrides default_credentials if supplied checkpoint_store_credentials: Overrides default_credentials if supplied expectations_store_name: Overrides default if supplied validations_store_name: Overrides default if supplied evaluation_parameter_store_name: Overrides default if supplied checkpoint_store_name: Overrides default if supplied """ def __init__( self, default_credentials: Optional[Dict] = None, expectations_store_credentials: Optional[Dict] = None, validations_store_credentials: Optional[Dict] = None, checkpoint_store_credentials: Optional[Dict] = None, expectations_store_name: str = "expectations_database_store", validations_store_name: str = "validations_database_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_database_store", ): # Initialize base defaults super().__init__() # Use default credentials if separate credentials not supplied for expectations_store and validations_store if expectations_store_credentials is None: expectations_store_credentials = default_credentials if validations_store_credentials is None: validations_store_credentials = default_credentials if checkpoint_store_credentials is None: checkpoint_store_credentials = default_credentials # Overwrite defaults self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { 
expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": expectations_store_credentials, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": validations_store_credentials, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": checkpoint_store_credentials, }, }, } class DataContextConfig(BaseYamlConfig): # TODO: <Alex>ALEX (does not work yet)</Alex> # _config_schema_class = DataContextConfigSchema def __init__( self, config_version: Optional[float] = None, datasources: Optional[ Union[ Dict[str, DatasourceConfig], Dict[str, Dict[str, Union[Dict[str, str], str, dict]]], ] ] = None, expectations_store_name: Optional[str] = None, validations_store_name: Optional[str] = None, evaluation_parameter_store_name: Optional[str] = None, checkpoint_store_name: Optional[str] = None, plugins_directory: Optional[str] = None, validation_operators=None, stores: Optional[Dict] = None, data_docs_sites: Optional[Dict] = None, notebooks=None, config_variables_file_path: Optional[str] = None, anonymous_usage_statistics=None, store_backend_defaults: Optional[BaseStoreBackendDefaults] = None, commented_map: Optional[CommentedMap] = None, concurrency: Optional[Union[ConcurrencyConfig, Dict]] = None, ): # Set defaults if config_version is None: config_version = DataContextConfigDefaults.DEFAULT_CONFIG_VERSION.value # Set defaults via store_backend_defaults if one is passed in # Override attributes from store_backend_defaults with any items passed into the constructor: if store_backend_defaults is not None: if stores is None: stores = store_backend_defaults.stores if expectations_store_name is None: expectations_store_name = store_backend_defaults.expectations_store_name if validations_store_name is None: validations_store_name = store_backend_defaults.validations_store_name if evaluation_parameter_store_name is None: evaluation_parameter_store_name = ( store_backend_defaults.evaluation_parameter_store_name ) if data_docs_sites is None: data_docs_sites = store_backend_defaults.data_docs_sites if checkpoint_store_name is None: checkpoint_store_name = store_backend_defaults.checkpoint_store_name self._config_version = config_version if datasources is None: datasources = {} self.datasources = datasources self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name if checkpoint_store_name is not None: self.checkpoint_store_name = checkpoint_store_name self.plugins_directory = plugins_directory if validation_operators is not None: self.validation_operators = validation_operators self.stores = stores self.notebooks = notebooks self.data_docs_sites = data_docs_sites self.config_variables_file_path = config_variables_file_path if anonymous_usage_statistics is None: anonymous_usage_statistics = AnonymizedUsageStatisticsConfig() elif isinstance(anonymous_usage_statistics, dict): anonymous_usage_statistics = AnonymizedUsageStatisticsConfig( **anonymous_usage_statistics ) self.anonymous_usage_statistics = anonymous_usage_statistics if concurrency is None: concurrency = ConcurrencyConfig() elif isinstance(concurrency, dict): concurrency = ConcurrencyConfig(**concurrency) self.concurrency: 
ConcurrencyConfig = concurrency super().__init__(commented_map=commented_map) # TODO: <Alex>ALEX (we still need the next two properties)</Alex> @classmethod def get_config_class(cls): return cls # DataContextConfig @classmethod def get_schema_class(cls): return DataContextConfigSchema @property def config_version(self): return self._config_version class CheckpointConfigSchema(Schema): class Meta: unknown = INCLUDE fields = ( "name", "config_version", "template_name", "module_name", "class_name", "run_name_template", "expectation_suite_name", "batch_request", "action_list", "evaluation_parameters", "runtime_configuration", "validations", "profilers", # Next two fields are for LegacyCheckpoint configuration "validation_operator_name", "batches", # Next fields are used by configurators "site_names", "slack_webhook", "notify_on", "notify_with", "ge_cloud_id", "expectation_suite_ge_cloud_id", ) ordered = True # if keys have None value, remove in post_dump REMOVE_KEYS_IF_NONE = [ "site_names", "slack_webhook", "notify_on", "notify_with", ] ge_cloud_id = fields.UUID(required=False, allow_none=True) name = fields.String(required=False, allow_none=True) config_version = fields.Number( validate=lambda x: (0 < x < 100) or x is None, error_messages={"invalid": "config version must " "be a number or None."}, required=False, allow_none=True, ) template_name = fields.String(required=False, allow_none=True) module_name = fields.String(required=False, missing="great_expectations.checkpoint") class_name = fields.Str(required=False, allow_none=True) run_name_template = fields.String(required=False, allow_none=True) expectation_suite_name = fields.String(required=False, allow_none=True) expectation_suite_ge_cloud_id = fields.UUID(required=False, allow_none=True) batch_request = fields.Dict(required=False, allow_none=True) action_list = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) evaluation_parameters = fields.Dict(required=False, allow_none=True) runtime_configuration = fields.Dict(required=False, allow_none=True) validations = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) profilers = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) # Next two fields are for LegacyCheckpoint configuration validation_operator_name = fields.Str(required=False, allow_none=True) batches = fields.List( cls_or_instance=fields.Dict( keys=fields.Str( validate=OneOf(["batch_kwargs", "expectation_suite_names"]), required=False, allow_none=True, ) ), required=False, allow_none=True, ) # Next fields are used by configurators site_names = fields.Raw(required=False, allow_none=True) slack_webhook = fields.String(required=False, allow_none=True) notify_on = fields.String(required=False, allow_none=True) notify_with = fields.String(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if not ( "name" in data or "validation_operator_name" in data or "batches" in data ): raise ge_exceptions.InvalidConfigError( f"""Your current Checkpoint configuration is incomplete. Please update your Checkpoint configuration to continue. """ ) if data.get("config_version"): if "name" not in data: raise ge_exceptions.InvalidConfigError( f"""Your Checkpoint configuration requires the "name" field. Please update your current Checkpoint configuration to continue. 
""" ) @post_dump def remove_keys_if_none(self, data, **kwargs): data = deepcopy(data) for key in self.REMOVE_KEYS_IF_NONE: if key in data and data[key] is None: data.pop(key) return data class CheckpointConfig(BaseYamlConfig): # TODO: <Alex>ALEX (does not work yet)</Alex> # _config_schema_class = CheckpointConfigSchema def __init__( self, name: Optional[str] = None, config_version: Optional[Union[int, float]] = None, template_name: Optional[str] = None, module_name: Optional[str] = None, class_name: Optional[str] = None, run_name_template: Optional[str] = None, expectation_suite_name: Optional[str] = None, batch_request: Optional[dict] = None, action_list: Optional[List[dict]] = None, evaluation_parameters: Optional[dict] = None, runtime_configuration: Optional[dict] = None, validations: Optional[List[dict]] = None, profilers: Optional[List[dict]] = None, validation_operator_name: Optional[str] = None, batches: Optional[List[dict]] = None, commented_map: Optional[CommentedMap] = None, ge_cloud_id: Optional[str] = None, # the following four args are used by SimpleCheckpoint site_names: Optional[Union[list, str]] = None, slack_webhook: Optional[str] = None, notify_on: Optional[str] = None, notify_with: Optional[str] = None, expectation_suite_ge_cloud_id: Optional[str] = None, ): self._name = name self._config_version = config_version if self.config_version is None: class_name = class_name or "LegacyCheckpoint" self.validation_operator_name = validation_operator_name if batches is not None and isinstance(batches, list): self.batches = batches else: class_name = class_name or "Checkpoint" self._template_name = template_name self._run_name_template = run_name_template self._expectation_suite_name = expectation_suite_name self._expectation_suite_ge_cloud_id = expectation_suite_ge_cloud_id self._batch_request = batch_request self._action_list = action_list or [] self._evaluation_parameters = evaluation_parameters or {} self._runtime_configuration = runtime_configuration or {} self._validations = validations or [] self._profilers = profilers or [] self._ge_cloud_id = ge_cloud_id # the following attributes are used by SimpleCheckpoint self._site_names = site_names self._slack_webhook = slack_webhook self._notify_on = notify_on self._notify_with = notify_with self._module_name = module_name or "great_expectations.checkpoint" self._class_name = class_name super().__init__(commented_map=commented_map) def update( self, other_config: Optional["CheckpointConfig"] = None, runtime_kwargs: Optional[dict] = None, ): assert other_config is not None or runtime_kwargs is not None, ( "other_config and runtime_kwargs cannot both " "be None" ) if other_config is not None: # replace if other_config.name is not None: self.name = other_config.name if other_config.module_name is not None: self.module_name = other_config.module_name if other_config.class_name is not None: self.class_name = other_config.class_name if other_config.run_name_template is not None: self.run_name_template = other_config.run_name_template if other_config.expectation_suite_name is not None: self.expectation_suite_name = other_config.expectation_suite_name if other_config.expectation_suite_ge_cloud_id is not None: self.expectation_suite_ge_cloud_id = ( other_config.expectation_suite_ge_cloud_id ) # update if other_config.batch_request is not None: if self.batch_request is None: batch_request = {} else: batch_request = self.batch_request other_batch_request = other_config.batch_request updated_batch_request = nested_update( batch_request, 
other_batch_request, ) self._batch_request = updated_batch_request if other_config.action_list is not None: self.action_list = self.get_updated_action_list( base_action_list=self.action_list, other_action_list=other_config.action_list, ) if other_config.evaluation_parameters is not None: nested_update( self.evaluation_parameters, other_config.evaluation_parameters, ) if other_config.runtime_configuration is not None: nested_update( self.runtime_configuration, other_config.runtime_configuration, ) if other_config.validations is not None: self.validations.extend( filter( lambda v: v not in self.validations, other_config.validations ) ) if other_config.profilers is not None: self.profilers.extend(other_config.profilers) if runtime_kwargs is not None and any(runtime_kwargs.values()): # replace if runtime_kwargs.get("run_name_template") is not None: self.run_name_template = runtime_kwargs.get("run_name_template") if runtime_kwargs.get("expectation_suite_name") is not None: self.expectation_suite_name = runtime_kwargs.get( "expectation_suite_name" ) if runtime_kwargs.get("expectation_suite_ge_cloud_id") is not None: self.expectation_suite_ge_cloud_id = runtime_kwargs.get( "expectation_suite_ge_cloud_id" ) # update if runtime_kwargs.get("batch_request") is not None: batch_request = self.batch_request batch_request = batch_request or {} runtime_batch_request = runtime_kwargs.get("batch_request") batch_request = nested_update(batch_request, runtime_batch_request) self._batch_request = batch_request if runtime_kwargs.get("action_list") is not None: self.action_list = self.get_updated_action_list( base_action_list=self.action_list, other_action_list=runtime_kwargs.get("action_list"), ) if runtime_kwargs.get("evaluation_parameters") is not None: nested_update( self.evaluation_parameters, runtime_kwargs.get("evaluation_parameters"), ) if runtime_kwargs.get("runtime_configuration") is not None: nested_update( self.runtime_configuration, runtime_kwargs.get("runtime_configuration"), ) if runtime_kwargs.get("validations") is not None: self.validations.extend( filter( lambda v: v not in self.validations, runtime_kwargs.get("validations"), ) ) if runtime_kwargs.get("profilers") is not None: self.profilers.extend(runtime_kwargs.get("profilers")) # TODO: <Alex>ALEX (we still need the next two properties)</Alex> @classmethod def get_config_class(cls): return cls # CheckpointConfig @classmethod def get_schema_class(cls): return CheckpointConfigSchema @property def ge_cloud_id(self): return self._ge_cloud_id @ge_cloud_id.setter def ge_cloud_id(self, value: str): self._ge_cloud_id = value @property def expectation_suite_ge_cloud_id(self): return self._expectation_suite_ge_cloud_id @expectation_suite_ge_cloud_id.setter def expectation_suite_ge_cloud_id(self, value: str): self._expectation_suite_ge_cloud_id = value @property def name(self): return self._name @name.setter def name(self, value: str): self._name = value @property def template_name(self): return self._template_name @template_name.setter def template_name(self, value: str): self._template_name = value @property def config_version(self): return self._config_version @property def validations(self): return self._validations @property def profilers(self): return self._profilers @property def module_name(self): return self._module_name @module_name.setter def module_name(self, value: str): self._module_name = value @property def class_name(self): return self._class_name @class_name.setter def class_name(self, value: str): self._class_name = value @property 
def run_name_template(self): return self._run_name_template @run_name_template.setter def run_name_template(self, value: str): self._run_name_template = value @property def batch_request(self): return self._batch_request @batch_request.setter def batch_request(self, value: dict): self._batch_request = value @property def expectation_suite_name(self): return self._expectation_suite_name @expectation_suite_name.setter def expectation_suite_name(self, value: str): self._expectation_suite_name = value @property def action_list(self): return self._action_list @action_list.setter def action_list(self, value: List[dict]): self._action_list = value @property def site_names(self): return self._site_names @property def slack_webhook(self): return self._slack_webhook @property def notify_on(self): return self._notify_on @property def notify_with(self): return self._notify_with @classmethod def get_updated_action_list( cls, base_action_list: list, other_action_list: list, ) -> List[dict]: base_action_list_dict = {action["name"]: action for action in base_action_list} for other_action in other_action_list: other_action_name = other_action["name"] if other_action_name in base_action_list_dict: if other_action["action"] is None: base_action_list_dict.pop(other_action_name) else: nested_update( base_action_list_dict[other_action_name], other_action, dedup=True, ) else: base_action_list_dict[other_action_name] = other_action return list(base_action_list_dict.values()) @property def evaluation_parameters(self): return self._evaluation_parameters @property def runtime_configuration(self): return self._runtime_configuration class CheckpointValidationConfig(DictDot): pass class CheckpointValidationConfigSchema(Schema): pass dataContextConfigSchema = DataContextConfigSchema() datasourceConfigSchema = DatasourceConfigSchema() dataConnectorConfigSchema = DataConnectorConfigSchema() assetConfigSchema = AssetConfigSchema() sorterConfigSchema = SorterConfigSchema() anonymizedUsageStatisticsSchema = AnonymizedUsageStatisticsConfigSchema() notebookConfigSchema = NotebookConfigSchema() checkpointConfigSchema = CheckpointConfigSchema() concurrencyConfigSchema = ConcurrencyConfigSchema()
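# ---------------------------------------------------------------------------
# Illustrative usage sketch (editor-added, not part of the original module):
# the *StoreBackendDefaults classes above exist to cut down the boilerplate of
# building a DataContextConfig programmatically. This is a minimal example
# assuming these classes are importable as
# great_expectations.data_context.types.base; the bucket name is a placeholder.

from great_expectations.data_context.types.base import (
    DataContextConfig,
    S3StoreBackendDefaults,
)

# Route the expectations, validations, evaluation-parameter and checkpoint
# stores plus the data docs site to a single S3 bucket via default_bucket_name.
project_config = DataContextConfig(
    store_backend_defaults=S3StoreBackendDefaults(
        default_bucket_name="my-ge-bucket"  # placeholder bucket name
    )
)

# BaseYamlConfig supplies round-trip helpers; dumping to YAML pushes the object
# through DataContextConfigSchema, so schema validation applies on the way out.
print(project_config.to_yaml_str())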
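# A second short sketch (also editor-added): CheckpointConfig.update() merges
# action lists through the classmethod get_updated_action_list(), keyed by the
# action "name". An overriding entry whose "action" is None removes the base
# entry, matching names are deep-merged via nested_update(..., dedup=True), and
# unknown names are appended. The Slack action below is purely illustrative.

base_actions = [
    {"name": "store_validation_result", "action": {"class_name": "StoreValidationResultAction"}},
    {"name": "update_data_docs", "action": {"class_name": "UpdateDataDocsAction"}},
]
override_actions = [
    {"name": "update_data_docs", "action": None},  # drops the data docs action
    {"name": "send_slack_notification", "action": {"class_name": "SlackNotificationAction"}},
]

merged = CheckpointConfig.get_updated_action_list(
    base_action_list=base_actions,
    other_action_list=override_actions,
)
# merged now holds store_validation_result and send_slack_notification only.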
avg_line_length: 38.76044
max_line_length: 460
alphanum_fraction: 0.647868
import abc import enum import itertools import logging import uuid from copy import deepcopy from typing import Any, Dict, List, MutableMapping, Optional, Union from ruamel.yaml import YAML from ruamel.yaml.comments import CommentedMap from ruamel.yaml.compat import StringIO import great_expectations.exceptions as ge_exceptions from great_expectations.core.util import convert_to_json_serializable, nested_update from great_expectations.marshmallow__shade import ( INCLUDE, Schema, ValidationError, fields, post_dump, post_load, pre_load, validates_schema, ) from great_expectations.marshmallow__shade.validate import OneOf from great_expectations.types import DictDot, SerializableDictDot from great_expectations.types.configurations import ClassConfigSchema yaml = YAML() yaml.indent(mapping=2, sequence=4, offset=2) logger = logging.getLogger(__name__) CURRENT_GE_CONFIG_VERSION = 3 FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE = 3 CURRENT_CHECKPOINT_CONFIG_VERSION = 1 MINIMUM_SUPPORTED_CONFIG_VERSION = 2 DEFAULT_USAGE_STATISTICS_URL = ( "https://stats.greatexpectations.io/great_expectations/v1/usage_statistics" ) def object_to_yaml_str(obj): output_str: str with StringIO() as string_stream: yaml.dump(obj, string_stream) output_str = string_stream.getvalue() return output_str class BaseYamlConfig(SerializableDictDot): _config_schema_class = None def __init__(self, commented_map: CommentedMap = None): if commented_map is None: commented_map = CommentedMap() self._commented_map = commented_map @classmethod def _get_schema_instance(cls) -> Schema: if not issubclass(cls.get_schema_class(), Schema): raise ge_exceptions.InvalidConfigError( "Invalid type: A configuration schema class needs to inherit from the Marshmallow Schema class." ) if not issubclass(cls.get_config_class(), BaseYamlConfig): raise ge_exceptions.InvalidConfigError( "Invalid type: A configuration class needs to inherit from the BaseYamlConfig class." ) if hasattr(cls.get_config_class(), "_schema_instance"): schema_instance: Schema = cls.get_config_class()._schema_instance if schema_instance is None: cls.get_config_class()._schema_instance = (cls.get_schema_class())() else: return schema_instance else: cls.get_config_class().schema_instance = (cls.get_schema_class())() return cls.get_config_class().schema_instance @classmethod def from_commented_map(cls, commented_map: CommentedMap): try: config: Union[dict, BaseYamlConfig] config = cls._get_schema_instance().load(commented_map) if isinstance(config, dict): return cls.get_config_class()(commented_map=commented_map, **config) return config except ValidationError: logger.error( "Encountered errors during loading config. See ValidationError for more details." 
) raise def _get_schema_validated_updated_commented_map(self) -> CommentedMap: commented_map: CommentedMap = deepcopy(self._commented_map) commented_map.update(self._get_schema_instance().dump(self)) return commented_map def to_yaml(self, outfile): yaml.dump(self.commented_map, outfile) def to_yaml_str(self) -> str: return object_to_yaml_str(self.commented_map) def to_json_dict(self) -> dict: commented_map: CommentedMap = self.commented_map return convert_to_json_serializable(data=commented_map) @property def commented_map(self) -> CommentedMap: return self._get_schema_validated_updated_commented_map() @classmethod def get_config_class(cls): raise NotImplementedError @classmethod def get_schema_class(cls): raise NotImplementedError class AssetConfig(DictDot): def __init__( self, name=None, class_name=None, module_name=None, bucket=None, prefix=None, delimiter=None, max_keys=None, batch_spec_passthrough=None, **kwargs, ): if name is not None: self.name = name self._class_name = class_name self._module_name = module_name if bucket is not None: self.bucket = bucket if prefix is not None: self.prefix = prefix if delimiter is not None: self.delimiter = delimiter if max_keys is not None: self.max_keys = max_keys if batch_spec_passthrough is not None: self.batch_spec_passthrough = batch_spec_passthrough for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class AssetConfigSchema(Schema): class Meta: unknown = INCLUDE name = fields.String(required=False, allow_none=True) class_name = fields.String(required=False, allow_none=True, missing="Asset") module_name = fields.String( required=False, all_none=True, missing="great_expectations.datasource.data_connector.asset", ) base_directory = fields.String(required=False, allow_none=True) glob_directive = fields.String(required=False, allow_none=True) pattern = fields.String(required=False, allow_none=True) group_names = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) bucket = fields.String(required=False, allow_none=True) prefix = fields.String(required=False, allow_none=True) delimiter = fields.String(required=False, allow_none=True) max_keys = fields.Integer(required=False, allow_none=True) batch_spec_passthrough = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): pass @post_load def make_asset_config(self, data, **kwargs): return AssetConfig(**data) class SorterConfig(DictDot): def __init__( self, name, class_name=None, module_name=None, orderby="asc", reference_list=None, datetime_format=None, **kwargs, ): self._name = name self._class_name = class_name self._module_name = module_name self._orderby = orderby for k, v in kwargs.items(): setattr(self, k, v) if reference_list is not None: self._reference_list = reference_list if datetime_format is not None: self._datetime_format = datetime_format @property def name(self): return self._name @property def module_name(self): return self._module_name @property def class_name(self): return self._class_name @property def orderby(self): return self._orderby @property def reference_list(self): return self._reference_list @property def datetime_format(self): return self._datetime_format class SorterConfigSchema(Schema): class Meta: unknown = INCLUDE name = fields.String(required=True) class_name = fields.String(required=True) module_name = fields.String( missing="great_expectations.datasource.data_connector.sorter" ) 
orderby = fields.String(required=False, missing="asc", allow_none=False) reference_list = fields.List( cls_or_instance=fields.Str(), required=False, missing=None, allow_none=True ) datetime_format = fields.String(required=False, missing=None, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): pass @post_load def make_sorter_config(self, data, **kwargs): return SorterConfig(**data) class DataConnectorConfig(DictDot): def __init__( self, class_name, module_name=None, credentials=None, assets=None, base_directory=None, glob_directive=None, default_regex=None, batch_identifiers=None, sorters=None, batch_spec_passthrough=None, boto3_options=None, bucket=None, max_keys=None, azure_options=None, container=None, name_starts_with=None, bucket_or_name=None, max_results=None, prefix=None, delimiter=None, **kwargs, ): self._class_name = class_name self._module_name = module_name if credentials is not None: self.credentials = credentials if assets is not None: self.assets = assets if base_directory is not None: self.base_directory = base_directory if glob_directive is not None: self.glob_directive = glob_directive if default_regex is not None: self.default_regex = default_regex if batch_identifiers is not None: self.batch_identifiers = batch_identifiers if sorters is not None: self.sorters = sorters if batch_spec_passthrough is not None: self.batch_spec_passthrough = batch_spec_passthrough if boto3_options is not None: self.boto3_options = boto3_options if bucket is not None: self.bucket = bucket if max_keys is not None: self.max_keys = max_keys if azure_options is not None: self.azure_options = azure_options if container is not None: self.container = container if name_starts_with is not None: self.name_starts_with = name_starts_with if bucket_or_name is not None: self.bucket_or_name = bucket_or_name if max_results is not None: self.max_results = max_results if prefix is not None: self.prefix = prefix if delimiter is not None: self.delimiter = delimiter for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class DataConnectorConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(required=True) module_name = fields.String(missing="great_expectations.datasource.data_connector") assets = fields.Dict( keys=fields.Str(), values=fields.Nested(AssetConfigSchema, required=False, allow_none=True), required=False, allow_none=True, ) base_directory = fields.String(required=False, allow_none=True) glob_directive = fields.String(required=False, allow_none=True) sorters = fields.List( fields.Nested(SorterConfigSchema, required=False, allow_none=True), required=False, allow_none=True, ) default_regex = fields.Dict(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) batch_identifiers = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) bucket = fields.String(required=False, allow_none=True) max_keys = fields.Integer(required=False, allow_none=True) azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) container = fields.String(required=False, allow_none=True) name_starts_with = fields.String(required=False, allow_none=True) gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) bucket_or_name = 
fields.String(required=False, allow_none=True) max_results = fields.String(required=False, allow_none=True) prefix = fields.String(required=False, allow_none=True) delimiter = fields.String(required=False, allow_none=True) data_asset_name_prefix = fields.String(required=False, allow_none=True) data_asset_name_suffix = fields.String(required=False, allow_none=True) include_schema_name = fields.Boolean(required=False, allow_none=True) splitter_method = fields.String(required=False, allow_none=True) splitter_kwargs = fields.Dict(required=False, allow_none=True) sampling_method = fields.String(required=False, allow_none=True) sampling_kwargs = fields.Dict(required=False, allow_none=True) excluded_tables = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) included_tables = fields.List( cls_or_instance=fields.Str(), required=False, allow_none=True ) skip_inapplicable_tables = fields.Boolean(required=False, allow_none=True) batch_spec_passthrough = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if data["class_name"][0] == "$": return if ("default_regex" in data) and not ( data["class_name"] in [ "InferredAssetFilesystemDataConnector", "ConfiguredAssetFilesystemDataConnector", "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a subclass of the FilePathDataConnector class (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("glob_directive" in data) and not ( data["class_name"] in [ "InferredAssetFilesystemDataConnector", "ConfiguredAssetFilesystemDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a filesystem type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("delimiter" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3/Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("prefix" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3/GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if ("bucket" in data or "max_keys" in data) and not ( data["class_name"] in [ "InferredAssetS3DataConnector", "ConfiguredAssetS3DataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an S3 type of the data connector (your data connector is "{data['class_name']}"). 
Please update your configuration to continue. """ ) if ( "azure_options" in data or "container" in data or "name_starts_with" in data ) and not ( data["class_name"] in [ "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an Azure type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if "azure_options" in data and data["class_name"] in [ "InferredAssetAzureDataConnector", "ConfiguredAssetAzureDataConnector", ]: azure_options = data["azure_options"] if not (("conn_str" in azure_options) ^ ("account_url" in azure_options)): raise ge_exceptions.InvalidConfigError( f"""Your current configuration is either missing methods of authentication or is using too many for the Azure type of data connector. You must only select one between `conn_str` or `account_url`. Please update your configuration to continue. """ ) if ( "gcs_options" in data or "bucket_or_name" in data or "max_results" in data ) and not ( data["class_name"] in [ "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by a GCS type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. """ ) if "gcs_options" in data and data["class_name"] in [ "InferredAssetGCSDataConnector", "ConfiguredAssetGCSDataConnector", ]: gcs_options = data["gcs_options"] if "filename" in gcs_options and "info" in gcs_options: raise ge_exceptions.InvalidConfigError( f"""Your current configuration can only use a single method of authentication for the GCS type of data connector. You must only select one between `filename` (from_service_account_file) and `info` (from_service_account_info). Please update your configuration to continue. """ ) if ( "data_asset_name_prefix" in data or "data_asset_name_suffix" in data or "include_schema_name" in data or "splitter_method" in data or "splitter_kwargs" in data or "sampling_method" in data or "sampling_kwargs" in data or "excluded_tables" in data or "included_tables" in data or "skip_inapplicable_tables" in data ) and not ( data["class_name"] in [ "InferredAssetSqlDataConnector", "ConfiguredAssetSqlDataConnector", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data connector that are required only by an SQL type of the data connector (your data connector is "{data['class_name']}"). Please update your configuration to continue. 
""" ) @post_load def make_data_connector_config(self, data, **kwargs): return DataConnectorConfig(**data) class ExecutionEngineConfig(DictDot): def __init__( self, class_name, module_name=None, caching=None, batch_spec_defaults=None, connection_string=None, credentials=None, spark_config=None, boto3_options=None, azure_options=None, gcs_options=None, **kwargs, ): self._class_name = class_name self._module_name = module_name if caching is not None: self.caching = caching if batch_spec_defaults is not None: self._batch_spec_defaults = batch_spec_defaults if connection_string is not None: self.connection_string = connection_string if credentials is not None: self.credentials = credentials if spark_config is not None: self.spark_config = spark_config if boto3_options is not None: self.boto3_options = boto3_options if azure_options is not None: self.azure_options = azure_options if gcs_options is not None: self.gcs_options = gcs_options for k, v in kwargs.items(): setattr(self, k, v) @property def module_name(self): return self._module_name @property def class_name(self): return self._class_name @property def batch_spec_defaults(self): return self._batch_spec_defaults class ExecutionEngineConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(required=True) module_name = fields.String(missing="great_expectations.execution_engine") connection_string = fields.String(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) spark_config = fields.Raw(required=False, allow_none=True) boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) caching = fields.Boolean(required=False, allow_none=True) batch_spec_defaults = fields.Dict(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if data["class_name"][0] == "$": return if ("connection_string" in data or "credentials" in data) and not ( data["class_name"] == "SqlAlchemyExecutionEngine" ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses the "connection_string" key in an execution engine, but only SqlAlchemyExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your configuration to continue. """ ) if "spark_config" in data and not ( data["class_name"] == "SparkDFExecutionEngine" ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses the "spark_config" key in an execution engine, but only SparkDFExecutionEngine requires this attribute (your execution engine is "{data['class_name']}"). Please update your configuration to continue. 
""" ) @post_load def make_execution_engine_config(self, data, **kwargs): return ExecutionEngineConfig(**data) class DatasourceConfig(DictDot): def __init__( self, class_name=None, module_name: Optional[str] = "great_expectations.datasource", execution_engine=None, data_connectors=None, data_asset_type=None, batch_kwargs_generators=None, connection_string=None, credentials=None, introspection=None, tables=None, boto3_options=None, azure_options=None, gcs_options=None, reader_method=None, reader_options=None, limit=None, **kwargs, ): self._class_name = class_name self._module_name = module_name if execution_engine is not None: self.execution_engine = execution_engine if data_connectors is not None and isinstance(data_connectors, dict): self.data_connectors = data_connectors if data_asset_type is None: if class_name == "PandasDatasource": data_asset_type = { "class_name": "PandasDataset", "module_name": "great_expectations.dataset", } elif class_name == "SqlAlchemyDatasource": data_asset_type = { "class_name": "SqlAlchemyDataset", "module_name": "great_expectations.dataset", } elif class_name == "SparkDFDatasource": data_asset_type = { "class_name": "SparkDFDataset", "module_name": "great_expectations.dataset", } if data_asset_type is not None: self.data_asset_type = data_asset_type if batch_kwargs_generators is not None: self.batch_kwargs_generators = batch_kwargs_generators if connection_string is not None: self.connection_string = connection_string if credentials is not None: self.credentials = credentials if introspection is not None: self.introspection = introspection if tables is not None: self.tables = tables if boto3_options is not None: self.boto3_options = boto3_options if azure_options is not None: self.azure_options = azure_options if gcs_options is not None: self.gcs_options = gcs_options if reader_method is not None: self.reader_method = reader_method if reader_options is not None: self.reader_options = reader_options if limit is not None: self.limit = limit for k, v in kwargs.items(): setattr(self, k, v) @property def class_name(self): return self._class_name @property def module_name(self): return self._module_name class DatasourceConfigSchema(Schema): class Meta: unknown = INCLUDE class_name = fields.String(missing="Datasource") module_name = fields.String(missing="great_expectations.datasource") force_reuse_spark_context = fields.Bool(required=False, allow_none=True) spark_config = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) execution_engine = fields.Nested( ExecutionEngineConfigSchema, required=False, allow_none=True ) data_connectors = fields.Dict( keys=fields.Str(), values=fields.Nested(DataConnectorConfigSchema), required=False, allow_none=True, ) data_asset_type = fields.Nested(ClassConfigSchema, required=False, allow_none=True) batch_kwargs_generators = fields.Dict( keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True ) connection_string = fields.String(required=False, allow_none=True) credentials = fields.Raw(required=False, allow_none=True) introspection = fields.Dict(required=False, allow_none=True) tables = fields.Dict(required=False, allow_none=True) boto3_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) azure_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) gcs_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) reader_method = fields.String(required=False, 
allow_none=True) reader_options = fields.Dict( keys=fields.Str(), values=fields.Str(), required=False, allow_none=True ) limit = fields.Integer(required=False, allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if "generators" in data: raise ge_exceptions.InvalidConfigError( 'Your current configuration uses the "generators" key in a datasource, but in version 0.10 of ' 'GE that key is renamed to "batch_kwargs_generators". Please update your configuration to continue.' ) if data["class_name"][0] == "$": return if ( "connection_string" in data or "credentials" in data or "introspection" in data or "tables" in data ) and not ( data["class_name"] in [ "SqlAlchemyDatasource", "SimpleSqlalchemyDatasource", ] ): raise ge_exceptions.InvalidConfigError( f"""Your current configuration uses one or more keys in a data source that are required only by a sqlalchemy data source (your data source is "{data['class_name']}"). Please update your configuration to continue. """ ) @post_load def make_datasource_config(self, data, **kwargs): return DatasourceConfig(**data) class AnonymizedUsageStatisticsConfig(DictDot): def __init__(self, enabled=True, data_context_id=None, usage_statistics_url=None): self._enabled = enabled if data_context_id is None: data_context_id = str(uuid.uuid4()) self._explicit_id = False else: self._explicit_id = True self._data_context_id = data_context_id if usage_statistics_url is None: usage_statistics_url = DEFAULT_USAGE_STATISTICS_URL self._explicit_url = False else: self._explicit_url = True self._usage_statistics_url = usage_statistics_url @property def enabled(self): return self._enabled @enabled.setter def enabled(self, enabled): if not isinstance(enabled, bool): raise ValueError("usage statistics enabled property must be boolean") self._enabled = enabled @property def data_context_id(self): return self._data_context_id @data_context_id.setter def data_context_id(self, data_context_id): try: uuid.UUID(data_context_id) except ValueError: raise ge_exceptions.InvalidConfigError( "data_context_id must be a valid uuid" ) self._data_context_id = data_context_id self._explicit_id = True @property def explicit_id(self): return self._explicit_id @property def usage_statistics_url(self): return self._usage_statistics_url @usage_statistics_url.setter def usage_statistics_url(self, usage_statistics_url): self._usage_statistics_url = usage_statistics_url self._explicit_url = True class AnonymizedUsageStatisticsConfigSchema(Schema): data_context_id = fields.UUID() enabled = fields.Boolean(default=True) usage_statistics_url = fields.URL(allow_none=True) _explicit_url = fields.Boolean(required=False) @post_load() def make_usage_statistics_config(self, data, **kwargs): if "data_context_id" in data: data["data_context_id"] = str(data["data_context_id"]) return AnonymizedUsageStatisticsConfig(**data) @post_dump() def filter_implicit(self, data, **kwargs): if not data.get("_explicit_url") and "usage_statistics_url" in data: del data["usage_statistics_url"] if "_explicit_url" in data: del data["_explicit_url"] return data class NotebookTemplateConfig(DictDot): def __init__(self, file_name, template_kwargs=None): self.file_name = file_name if template_kwargs: self.template_kwargs = template_kwargs else: self.template_kwargs = {} class NotebookTemplateConfigSchema(Schema): file_name = fields.String() template_kwargs = fields.Dict( keys=fields.Str(), values=fields.Str(), allow_none=True ) @post_load def make_notebook_template_config(self, data, **kwargs): return 
NotebookTemplateConfig(**data) class NotebookConfig(DictDot): def __init__( self, class_name, module_name, custom_templates_module, header_markdown=None, footer_markdown=None, table_expectations_header_markdown=None, column_expectations_header_markdown=None, table_expectations_not_found_markdown=None, column_expectations_not_found_markdown=None, authoring_intro_markdown=None, column_expectations_markdown=None, header_code=None, footer_code=None, table_expectation_code=None, column_expectation_code=None, ): self.class_name = class_name self.module_name = module_name self.custom_templates_module = custom_templates_module self.header_markdown = header_markdown self.footer_markdown = footer_markdown self.table_expectations_header_markdown = table_expectations_header_markdown self.column_expectations_header_markdown = column_expectations_header_markdown self.table_expectations_not_found_markdown = ( table_expectations_not_found_markdown ) self.column_expectations_not_found_markdown = ( column_expectations_not_found_markdown ) self.authoring_intro_markdown = authoring_intro_markdown self.column_expectations_markdown = column_expectations_markdown self.header_code = header_code self.footer_code = footer_code self.table_expectation_code = table_expectation_code self.column_expectation_code = column_expectation_code class NotebookConfigSchema(Schema): class_name = fields.String(missing="SuiteEditNotebookRenderer") module_name = fields.String( missing="great_expectations.render.renderer.suite_edit_notebook_renderer" ) custom_templates_module = fields.String() header_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) footer_markdown = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) table_expectations_header_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_header_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) table_expectations_not_found_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_not_found_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) authoring_intro_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectations_markdown = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) header_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) footer_code = fields.Nested(NotebookTemplateConfigSchema, allow_none=True) table_expectation_code = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) column_expectation_code = fields.Nested( NotebookTemplateConfigSchema, allow_none=True ) @post_load def make_notebook_config(self, data, **kwargs): return NotebookConfig(**data) class NotebooksConfig(DictDot): def __init__(self, suite_edit): self.suite_edit = suite_edit class NotebooksConfigSchema(Schema): suite_edit = fields.Nested(NotebookConfigSchema) @post_load def make_notebooks_config(self, data, **kwargs): return NotebooksConfig(**data) class ConcurrencyConfig(DictDot): def __init__(self, enabled: Optional[bool] = False): self._enabled = enabled @property def enabled(self): return self._enabled @property def max_database_query_concurrency(self) -> int: return 100 def add_sqlalchemy_create_engine_parameters( self, parameters: MutableMapping[str, Any] ): if not self._enabled: return if "pool_size" not in parameters: parameters["pool_size"] = 0 if "max_overflow" not in parameters: parameters["max_overflow"] = -1 class ConcurrencyConfigSchema(Schema): enabled = 
fields.Boolean(default=False) class GeCloudConfig(DictDot): def __init__(self, base_url: str, account_id: str, access_token: str): self.base_url = base_url self.account_id = account_id self.access_token = access_token def to_json_dict(self): return { "base_url": self.base_url, "account_id": self.account_id, "access_token": self.access_token, } class DataContextConfigSchema(Schema): config_version = fields.Number( validate=lambda x: 0 < x < 100, error_messages={"invalid": "config version must " "be a number."}, ) datasources = fields.Dict( keys=fields.Str(), values=fields.Nested(DatasourceConfigSchema), required=False, allow_none=True, ) expectations_store_name = fields.Str() validations_store_name = fields.Str() evaluation_parameter_store_name = fields.Str() checkpoint_store_name = fields.Str(required=False, allow_none=True) plugins_directory = fields.Str(allow_none=True) validation_operators = fields.Dict( keys=fields.Str(), values=fields.Dict(), required=False, allow_none=True ) stores = fields.Dict(keys=fields.Str(), values=fields.Dict()) notebooks = fields.Nested(NotebooksConfigSchema, allow_none=True) data_docs_sites = fields.Dict( keys=fields.Str(), values=fields.Dict(), allow_none=True ) config_variables_file_path = fields.Str(allow_none=True) anonymous_usage_statistics = fields.Nested(AnonymizedUsageStatisticsConfigSchema) concurrency = fields.Nested(ConcurrencyConfigSchema) def handle_error(self, exc, data, **kwargs): if ( exc and exc.messages and isinstance(exc.messages, dict) and all([key is None for key in exc.messages.keys()]) ): exc.messages = list(itertools.chain.from_iterable(exc.messages.values())) message: str = ( f"Error while processing DataContextConfig: {' '.join(exc.messages)}" ) logger.error(message) raise ge_exceptions.InvalidDataContextConfigError( message=message, ) @validates_schema def validate_schema(self, data, **kwargs): if "config_version" not in data: raise ge_exceptions.InvalidDataContextConfigError( "The key `config_version` is missing; please check your config file.", validation_error=ValidationError(message="no config_version key"), ) if not isinstance(data["config_version"], (int, float)): raise ge_exceptions.InvalidDataContextConfigError( "The key `config_version` must be a number. Please check your config file.", validation_error=ValidationError(message="config version not a number"), ) if data["config_version"] == 0 and any( [ store_config["class_name"] == "ValidationsStore" for store_config in data["stores"].values() ] ): raise ge_exceptions.UnsupportedConfigVersionError( "You appear to be using a config version from the 0.7.x series. This version is no longer supported." ) if data["config_version"] < MINIMUM_SUPPORTED_CONFIG_VERSION: raise ge_exceptions.UnsupportedConfigVersionError( "You appear to have an invalid config version ({}).\n The version number must be at least {}. 
" "Please see the migration guide at https://docs.greatexpectations.io/en/latest/guides/how_to_guides/migrating_versions.html".format( data["config_version"], MINIMUM_SUPPORTED_CONFIG_VERSION ), ) if data["config_version"] > CURRENT_GE_CONFIG_VERSION: raise ge_exceptions.InvalidDataContextConfigError( "You appear to have an invalid config version ({}).\n The maximum valid version is {}.".format( data["config_version"], CURRENT_GE_CONFIG_VERSION ), validation_error=ValidationError(message="config version too high"), ) if data["config_version"] < CURRENT_GE_CONFIG_VERSION and ( "checkpoint_store_name" in data or any( [ store_config["class_name"] == "CheckpointStore" for store_config in data["stores"].values() ] ) ): raise ge_exceptions.InvalidDataContextConfigError( "You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format( data["config_version"], float(CURRENT_GE_CONFIG_VERSION) ), validation_error=ValidationError( message="You appear to be using a Checkpoint store with an invalid config version ({}).\n Your data context with this older configuration version specifies a Checkpoint store, which is a new feature. Please update your configuration to the new version number {} before adding a Checkpoint store.\n Visit https://docs.greatexpectations.io/en/latest/how_to_guides/migrating_versions.html to learn more about the upgrade process.".format( data["config_version"], float(CURRENT_GE_CONFIG_VERSION) ) ), ) if ( data["config_version"] >= FIRST_GE_CONFIG_VERSION_WITH_CHECKPOINT_STORE and "validation_operators" in data and data["validation_operators"] is not None ): logger.warning( "You appear to be using a legacy capability with the latest config version ({}).\n Your data context with this configuration version uses validation_operators, which are being deprecated. 
Please update your configuration to be compatible with the version number {}.".format( data["config_version"], CURRENT_GE_CONFIG_VERSION ), ) class DataContextConfigDefaults(enum.Enum): DEFAULT_CONFIG_VERSION = CURRENT_GE_CONFIG_VERSION DEFAULT_EXPECTATIONS_STORE_NAME = "expectations_store" EXPECTATIONS_BASE_DIRECTORY = "expectations" DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"{EXPECTATIONS_BASE_DIRECTORY}/" ) DEFAULT_VALIDATIONS_STORE_NAME = "validations_store" VALIDATIONS_BASE_DIRECTORY = "validations" DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"uncommitted/{VALIDATIONS_BASE_DIRECTORY}/" ) DEFAULT_EVALUATION_PARAMETER_STORE_NAME = "evaluation_parameter_store" DEFAULT_EVALUATION_PARAMETER_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( "evaluation_parameters/" ) DEFAULT_CHECKPOINT_STORE_NAME = "checkpoint_store" CHECKPOINTS_BASE_DIRECTORY = "checkpoints" DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME = ( f"{CHECKPOINTS_BASE_DIRECTORY}/" ) DEFAULT_DATA_DOCS_SITE_NAME = "local_site" DEFAULT_CONFIG_VARIABLES_FILEPATH = "uncommitted/config_variables.yml" PLUGINS_BASE_DIRECTORY = "plugins" DEFAULT_PLUGINS_DIRECTORY = f"{PLUGINS_BASE_DIRECTORY}/" NOTEBOOKS_BASE_DIRECTORY = "notebooks" DEFAULT_VALIDATION_OPERATORS = { "action_list_operator": { "class_name": "ActionListValidationOperator", "action_list": [ { "name": "store_validation_result", "action": {"class_name": "StoreValidationResultAction"}, }, { "name": "store_evaluation_params", "action": {"class_name": "StoreEvaluationParametersAction"}, }, { "name": "update_data_docs", "action": {"class_name": "UpdateDataDocsAction"}, }, ], } } DEFAULT_STORES = { DEFAULT_EXPECTATIONS_STORE_NAME: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": DEFAULT_EXPECTATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, DEFAULT_VALIDATIONS_STORE_NAME: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": DEFAULT_VALIDATIONS_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, DEFAULT_EVALUATION_PARAMETER_STORE_NAME: { "class_name": "EvaluationParameterStore" }, DEFAULT_CHECKPOINT_STORE_NAME: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleFilesystemStoreBackend", "suppress_store_backend_id": True, "base_directory": DEFAULT_CHECKPOINT_STORE_BASE_DIRECTORY_RELATIVE_NAME, }, }, } DEFAULT_DATA_DOCS_SITES = { DEFAULT_DATA_DOCS_SITE_NAME: { "class_name": "SiteBuilder", "show_how_to_buttons": True, "store_backend": { "class_name": "TupleFilesystemStoreBackend", "base_directory": "uncommitted/data_docs/local_site/", }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class CheckpointConfigDefaults(enum.Enum): DEFAULT_CONFIG_VERSION = CURRENT_CHECKPOINT_CONFIG_VERSION class BaseStoreBackendDefaults(DictDot): def __init__( self, expectations_store_name: str = DataContextConfigDefaults.DEFAULT_EXPECTATIONS_STORE_NAME.value, validations_store_name: str = DataContextConfigDefaults.DEFAULT_VALIDATIONS_STORE_NAME.value, evaluation_parameter_store_name: str = DataContextConfigDefaults.DEFAULT_EVALUATION_PARAMETER_STORE_NAME.value, checkpoint_store_name: str = DataContextConfigDefaults.DEFAULT_CHECKPOINT_STORE_NAME.value, data_docs_site_name: str = DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITE_NAME.value, validation_operators: dict = None, stores: dict = None, data_docs_sites: dict = None, ): self.expectations_store_name = expectations_store_name self.validations_store_name = 
validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.validation_operators = validation_operators if stores is None: stores = deepcopy(DataContextConfigDefaults.DEFAULT_STORES.value) self.stores = stores if data_docs_sites is None: data_docs_sites = deepcopy( DataContextConfigDefaults.DEFAULT_DATA_DOCS_SITES.value ) self.data_docs_sites = data_docs_sites self.data_docs_site_name = data_docs_site_name class S3StoreBackendDefaults(BaseStoreBackendDefaults): def __init__( self, default_bucket_name: Optional[str] = None, expectations_store_bucket_name: Optional[str] = None, validations_store_bucket_name: Optional[str] = None, data_docs_bucket_name: Optional[str] = None, checkpoint_store_bucket_name: Optional[str] = None, expectations_store_prefix: str = "expectations", validations_store_prefix: str = "validations", data_docs_prefix: str = "data_docs", checkpoint_store_prefix: str = "checkpoints", expectations_store_name: str = "expectations_S3_store", validations_store_name: str = "validations_S3_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_S3_store", ): super().__init__() if expectations_store_bucket_name is None: expectations_store_bucket_name = default_bucket_name if validations_store_bucket_name is None: validations_store_bucket_name = default_bucket_name if data_docs_bucket_name is None: data_docs_bucket_name = default_bucket_name if checkpoint_store_bucket_name is None: checkpoint_store_bucket_name = default_bucket_name self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": expectations_store_bucket_name, "prefix": expectations_store_prefix, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": validations_store_bucket_name, "prefix": validations_store_prefix, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": checkpoint_store_bucket_name, "prefix": checkpoint_store_prefix, }, }, } self.data_docs_sites = { "s3_site": { "class_name": "SiteBuilder", "show_how_to_buttons": True, "store_backend": { "class_name": "TupleS3StoreBackend", "bucket": data_docs_bucket_name, "prefix": data_docs_prefix, }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class FilesystemStoreBackendDefaults(BaseStoreBackendDefaults): def __init__( self, root_directory: Optional[str] = None, plugins_directory: Optional[str] = None, ): super().__init__() if plugins_directory is None: plugins_directory = ( DataContextConfigDefaults.DEFAULT_PLUGINS_DIRECTORY.value ) self.plugins_directory = plugins_directory if root_directory is not None: self.stores[self.expectations_store_name]["store_backend"][ "root_directory" ] = root_directory self.stores[self.validations_store_name]["store_backend"][ "root_directory" ] = root_directory self.stores[self.checkpoint_store_name]["store_backend"][ "root_directory" ] = root_directory self.data_docs_sites[self.data_docs_site_name]["store_backend"][ 
"root_directory" ] = root_directory class InMemoryStoreBackendDefaults(BaseStoreBackendDefaults): def __init__( self, ): super().__init__() self.stores = { self.expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, self.validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, self.evaluation_parameter_store_name: { "class_name": "EvaluationParameterStore" }, self.checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "InMemoryStoreBackend", }, }, } self.data_docs_sites = {} class GCSStoreBackendDefaults(BaseStoreBackendDefaults): def __init__( self, default_bucket_name: Optional[str] = None, default_project_name: Optional[str] = None, expectations_store_bucket_name: Optional[str] = None, validations_store_bucket_name: Optional[str] = None, data_docs_bucket_name: Optional[str] = None, checkpoint_store_bucket_name: Optional[str] = None, expectations_store_project_name: Optional[str] = None, validations_store_project_name: Optional[str] = None, data_docs_project_name: Optional[str] = None, checkpoint_store_project_name: Optional[str] = None, expectations_store_prefix: str = "expectations", validations_store_prefix: str = "validations", data_docs_prefix: str = "data_docs", checkpoint_store_prefix: str = "checkpoints", expectations_store_name: str = "expectations_GCS_store", validations_store_name: str = "validations_GCS_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_GCS_store", ): super().__init__() if expectations_store_bucket_name is None: expectations_store_bucket_name = default_bucket_name if validations_store_bucket_name is None: validations_store_bucket_name = default_bucket_name if data_docs_bucket_name is None: data_docs_bucket_name = default_bucket_name if checkpoint_store_bucket_name is None: checkpoint_store_bucket_name = default_bucket_name if expectations_store_project_name is None: expectations_store_project_name = default_project_name if validations_store_project_name is None: validations_store_project_name = default_project_name if data_docs_project_name is None: data_docs_project_name = default_project_name if checkpoint_store_project_name is None: checkpoint_store_project_name = default_project_name self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": expectations_store_project_name, "bucket": expectations_store_bucket_name, "prefix": expectations_store_prefix, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": validations_store_project_name, "bucket": validations_store_bucket_name, "prefix": validations_store_prefix, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "TupleGCSStoreBackend", "project": checkpoint_store_project_name, "bucket": checkpoint_store_bucket_name, "prefix": checkpoint_store_prefix, }, }, } self.data_docs_sites = { "gcs_site": { "class_name": "SiteBuilder", "show_how_to_buttons": True, 
"store_backend": { "class_name": "TupleGCSStoreBackend", "project": data_docs_project_name, "bucket": data_docs_bucket_name, "prefix": data_docs_prefix, }, "site_index_builder": { "class_name": "DefaultSiteIndexBuilder", }, } } class DatabaseStoreBackendDefaults(BaseStoreBackendDefaults): def __init__( self, default_credentials: Optional[Dict] = None, expectations_store_credentials: Optional[Dict] = None, validations_store_credentials: Optional[Dict] = None, checkpoint_store_credentials: Optional[Dict] = None, expectations_store_name: str = "expectations_database_store", validations_store_name: str = "validations_database_store", evaluation_parameter_store_name: str = "evaluation_parameter_store", checkpoint_store_name: str = "checkpoint_database_store", ): super().__init__() if expectations_store_credentials is None: expectations_store_credentials = default_credentials if validations_store_credentials is None: validations_store_credentials = default_credentials if checkpoint_store_credentials is None: checkpoint_store_credentials = default_credentials self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name self.checkpoint_store_name = checkpoint_store_name self.stores = { expectations_store_name: { "class_name": "ExpectationsStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": expectations_store_credentials, }, }, validations_store_name: { "class_name": "ValidationsStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": validations_store_credentials, }, }, evaluation_parameter_store_name: {"class_name": "EvaluationParameterStore"}, checkpoint_store_name: { "class_name": "CheckpointStore", "store_backend": { "class_name": "DatabaseStoreBackend", "credentials": checkpoint_store_credentials, }, }, } class DataContextConfig(BaseYamlConfig): def __init__( self, config_version: Optional[float] = None, datasources: Optional[ Union[ Dict[str, DatasourceConfig], Dict[str, Dict[str, Union[Dict[str, str], str, dict]]], ] ] = None, expectations_store_name: Optional[str] = None, validations_store_name: Optional[str] = None, evaluation_parameter_store_name: Optional[str] = None, checkpoint_store_name: Optional[str] = None, plugins_directory: Optional[str] = None, validation_operators=None, stores: Optional[Dict] = None, data_docs_sites: Optional[Dict] = None, notebooks=None, config_variables_file_path: Optional[str] = None, anonymous_usage_statistics=None, store_backend_defaults: Optional[BaseStoreBackendDefaults] = None, commented_map: Optional[CommentedMap] = None, concurrency: Optional[Union[ConcurrencyConfig, Dict]] = None, ): if config_version is None: config_version = DataContextConfigDefaults.DEFAULT_CONFIG_VERSION.value if store_backend_defaults is not None: if stores is None: stores = store_backend_defaults.stores if expectations_store_name is None: expectations_store_name = store_backend_defaults.expectations_store_name if validations_store_name is None: validations_store_name = store_backend_defaults.validations_store_name if evaluation_parameter_store_name is None: evaluation_parameter_store_name = ( store_backend_defaults.evaluation_parameter_store_name ) if data_docs_sites is None: data_docs_sites = store_backend_defaults.data_docs_sites if checkpoint_store_name is None: checkpoint_store_name = store_backend_defaults.checkpoint_store_name self._config_version = config_version if datasources is None: datasources = {} 
self.datasources = datasources self.expectations_store_name = expectations_store_name self.validations_store_name = validations_store_name self.evaluation_parameter_store_name = evaluation_parameter_store_name if checkpoint_store_name is not None: self.checkpoint_store_name = checkpoint_store_name self.plugins_directory = plugins_directory if validation_operators is not None: self.validation_operators = validation_operators self.stores = stores self.notebooks = notebooks self.data_docs_sites = data_docs_sites self.config_variables_file_path = config_variables_file_path if anonymous_usage_statistics is None: anonymous_usage_statistics = AnonymizedUsageStatisticsConfig() elif isinstance(anonymous_usage_statistics, dict): anonymous_usage_statistics = AnonymizedUsageStatisticsConfig( **anonymous_usage_statistics ) self.anonymous_usage_statistics = anonymous_usage_statistics if concurrency is None: concurrency = ConcurrencyConfig() elif isinstance(concurrency, dict): concurrency = ConcurrencyConfig(**concurrency) self.concurrency: ConcurrencyConfig = concurrency super().__init__(commented_map=commented_map) @classmethod def get_config_class(cls): return cls @classmethod def get_schema_class(cls): return DataContextConfigSchema @property def config_version(self): return self._config_version class CheckpointConfigSchema(Schema): class Meta: unknown = INCLUDE fields = ( "name", "config_version", "template_name", "module_name", "class_name", "run_name_template", "expectation_suite_name", "batch_request", "action_list", "evaluation_parameters", "runtime_configuration", "validations", "profilers", "validation_operator_name", "batches", "site_names", "slack_webhook", "notify_on", "notify_with", "ge_cloud_id", "expectation_suite_ge_cloud_id", ) ordered = True REMOVE_KEYS_IF_NONE = [ "site_names", "slack_webhook", "notify_on", "notify_with", ] ge_cloud_id = fields.UUID(required=False, allow_none=True) name = fields.String(required=False, allow_none=True) config_version = fields.Number( validate=lambda x: (0 < x < 100) or x is None, error_messages={"invalid": "config version must " "be a number or None."}, required=False, allow_none=True, ) template_name = fields.String(required=False, allow_none=True) module_name = fields.String(required=False, missing="great_expectations.checkpoint") class_name = fields.Str(required=False, allow_none=True) run_name_template = fields.String(required=False, allow_none=True) expectation_suite_name = fields.String(required=False, allow_none=True) expectation_suite_ge_cloud_id = fields.UUID(required=False, allow_none=True) batch_request = fields.Dict(required=False, allow_none=True) action_list = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) evaluation_parameters = fields.Dict(required=False, allow_none=True) runtime_configuration = fields.Dict(required=False, allow_none=True) validations = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) profilers = fields.List( cls_or_instance=fields.Dict(), required=False, allow_none=True ) validation_operator_name = fields.Str(required=False, allow_none=True) batches = fields.List( cls_or_instance=fields.Dict( keys=fields.Str( validate=OneOf(["batch_kwargs", "expectation_suite_names"]), required=False, allow_none=True, ) ), required=False, allow_none=True, ) site_names = fields.Raw(required=False, allow_none=True) slack_webhook = fields.String(required=False, allow_none=True) notify_on = fields.String(required=False, allow_none=True) notify_with = fields.String(required=False, 
allow_none=True) @validates_schema def validate_schema(self, data, **kwargs): if not ( "name" in data or "validation_operator_name" in data or "batches" in data ): raise ge_exceptions.InvalidConfigError( f"""Your current Checkpoint configuration is incomplete. Please update your Checkpoint configuration to continue. """ ) if data.get("config_version"): if "name" not in data: raise ge_exceptions.InvalidConfigError( f"""Your Checkpoint configuration requires the "name" field. Please update your current Checkpoint configuration to continue. """ ) @post_dump def remove_keys_if_none(self, data, **kwargs): data = deepcopy(data) for key in self.REMOVE_KEYS_IF_NONE: if key in data and data[key] is None: data.pop(key) return data class CheckpointConfig(BaseYamlConfig): def __init__( self, name: Optional[str] = None, config_version: Optional[Union[int, float]] = None, template_name: Optional[str] = None, module_name: Optional[str] = None, class_name: Optional[str] = None, run_name_template: Optional[str] = None, expectation_suite_name: Optional[str] = None, batch_request: Optional[dict] = None, action_list: Optional[List[dict]] = None, evaluation_parameters: Optional[dict] = None, runtime_configuration: Optional[dict] = None, validations: Optional[List[dict]] = None, profilers: Optional[List[dict]] = None, validation_operator_name: Optional[str] = None, batches: Optional[List[dict]] = None, commented_map: Optional[CommentedMap] = None, ge_cloud_id: Optional[str] = None, site_names: Optional[Union[list, str]] = None, slack_webhook: Optional[str] = None, notify_on: Optional[str] = None, notify_with: Optional[str] = None, expectation_suite_ge_cloud_id: Optional[str] = None, ): self._name = name self._config_version = config_version if self.config_version is None: class_name = class_name or "LegacyCheckpoint" self.validation_operator_name = validation_operator_name if batches is not None and isinstance(batches, list): self.batches = batches else: class_name = class_name or "Checkpoint" self._template_name = template_name self._run_name_template = run_name_template self._expectation_suite_name = expectation_suite_name self._expectation_suite_ge_cloud_id = expectation_suite_ge_cloud_id self._batch_request = batch_request self._action_list = action_list or [] self._evaluation_parameters = evaluation_parameters or {} self._runtime_configuration = runtime_configuration or {} self._validations = validations or [] self._profilers = profilers or [] self._ge_cloud_id = ge_cloud_id self._site_names = site_names self._slack_webhook = slack_webhook self._notify_on = notify_on self._notify_with = notify_with self._module_name = module_name or "great_expectations.checkpoint" self._class_name = class_name super().__init__(commented_map=commented_map) def update( self, other_config: Optional["CheckpointConfig"] = None, runtime_kwargs: Optional[dict] = None, ): assert other_config is not None or runtime_kwargs is not None, ( "other_config and runtime_kwargs cannot both " "be None" ) if other_config is not None: if other_config.name is not None: self.name = other_config.name if other_config.module_name is not None: self.module_name = other_config.module_name if other_config.class_name is not None: self.class_name = other_config.class_name if other_config.run_name_template is not None: self.run_name_template = other_config.run_name_template if other_config.expectation_suite_name is not None: self.expectation_suite_name = other_config.expectation_suite_name if other_config.expectation_suite_ge_cloud_id is not None: 
self.expectation_suite_ge_cloud_id = ( other_config.expectation_suite_ge_cloud_id ) if other_config.batch_request is not None: if self.batch_request is None: batch_request = {} else: batch_request = self.batch_request other_batch_request = other_config.batch_request updated_batch_request = nested_update( batch_request, other_batch_request, ) self._batch_request = updated_batch_request if other_config.action_list is not None: self.action_list = self.get_updated_action_list( base_action_list=self.action_list, other_action_list=other_config.action_list, ) if other_config.evaluation_parameters is not None: nested_update( self.evaluation_parameters, other_config.evaluation_parameters, ) if other_config.runtime_configuration is not None: nested_update( self.runtime_configuration, other_config.runtime_configuration, ) if other_config.validations is not None: self.validations.extend( filter( lambda v: v not in self.validations, other_config.validations ) ) if other_config.profilers is not None: self.profilers.extend(other_config.profilers) if runtime_kwargs is not None and any(runtime_kwargs.values()): if runtime_kwargs.get("run_name_template") is not None: self.run_name_template = runtime_kwargs.get("run_name_template") if runtime_kwargs.get("expectation_suite_name") is not None: self.expectation_suite_name = runtime_kwargs.get( "expectation_suite_name" ) if runtime_kwargs.get("expectation_suite_ge_cloud_id") is not None: self.expectation_suite_ge_cloud_id = runtime_kwargs.get( "expectation_suite_ge_cloud_id" ) if runtime_kwargs.get("batch_request") is not None: batch_request = self.batch_request batch_request = batch_request or {} runtime_batch_request = runtime_kwargs.get("batch_request") batch_request = nested_update(batch_request, runtime_batch_request) self._batch_request = batch_request if runtime_kwargs.get("action_list") is not None: self.action_list = self.get_updated_action_list( base_action_list=self.action_list, other_action_list=runtime_kwargs.get("action_list"), ) if runtime_kwargs.get("evaluation_parameters") is not None: nested_update( self.evaluation_parameters, runtime_kwargs.get("evaluation_parameters"), ) if runtime_kwargs.get("runtime_configuration") is not None: nested_update( self.runtime_configuration, runtime_kwargs.get("runtime_configuration"), ) if runtime_kwargs.get("validations") is not None: self.validations.extend( filter( lambda v: v not in self.validations, runtime_kwargs.get("validations"), ) ) if runtime_kwargs.get("profilers") is not None: self.profilers.extend(runtime_kwargs.get("profilers")) @classmethod def get_config_class(cls): return cls @classmethod def get_schema_class(cls): return CheckpointConfigSchema @property def ge_cloud_id(self): return self._ge_cloud_id @ge_cloud_id.setter def ge_cloud_id(self, value: str): self._ge_cloud_id = value @property def expectation_suite_ge_cloud_id(self): return self._expectation_suite_ge_cloud_id @expectation_suite_ge_cloud_id.setter def expectation_suite_ge_cloud_id(self, value: str): self._expectation_suite_ge_cloud_id = value @property def name(self): return self._name @name.setter def name(self, value: str): self._name = value @property def template_name(self): return self._template_name @template_name.setter def template_name(self, value: str): self._template_name = value @property def config_version(self): return self._config_version @property def validations(self): return self._validations @property def profilers(self): return self._profilers @property def module_name(self): return self._module_name 
@module_name.setter def module_name(self, value: str): self._module_name = value @property def class_name(self): return self._class_name @class_name.setter def class_name(self, value: str): self._class_name = value @property def run_name_template(self): return self._run_name_template @run_name_template.setter def run_name_template(self, value: str): self._run_name_template = value @property def batch_request(self): return self._batch_request @batch_request.setter def batch_request(self, value: dict): self._batch_request = value @property def expectation_suite_name(self): return self._expectation_suite_name @expectation_suite_name.setter def expectation_suite_name(self, value: str): self._expectation_suite_name = value @property def action_list(self): return self._action_list @action_list.setter def action_list(self, value: List[dict]): self._action_list = value @property def site_names(self): return self._site_names @property def slack_webhook(self): return self._slack_webhook @property def notify_on(self): return self._notify_on @property def notify_with(self): return self._notify_with @classmethod def get_updated_action_list( cls, base_action_list: list, other_action_list: list, ) -> List[dict]: base_action_list_dict = {action["name"]: action for action in base_action_list} for other_action in other_action_list: other_action_name = other_action["name"] if other_action_name in base_action_list_dict: if other_action["action"] is None: base_action_list_dict.pop(other_action_name) else: nested_update( base_action_list_dict[other_action_name], other_action, dedup=True, ) else: base_action_list_dict[other_action_name] = other_action return list(base_action_list_dict.values()) @property def evaluation_parameters(self): return self._evaluation_parameters @property def runtime_configuration(self): return self._runtime_configuration class CheckpointValidationConfig(DictDot): pass class CheckpointValidationConfigSchema(Schema): pass dataContextConfigSchema = DataContextConfigSchema() datasourceConfigSchema = DatasourceConfigSchema() dataConnectorConfigSchema = DataConnectorConfigSchema() assetConfigSchema = AssetConfigSchema() sorterConfigSchema = SorterConfigSchema() anonymizedUsageStatisticsSchema = AnonymizedUsageStatisticsConfigSchema() notebookConfigSchema = NotebookConfigSchema() checkpointConfigSchema = CheckpointConfigSchema() concurrencyConfigSchema = ConcurrencyConfigSchema()
true
true
f70131e5c3a3a3b91e5f27031da8d1afb2dc99b3
545
py
Python
source/openwarpgui/openwarp/__init__.py
NREL/OpenWARP
ca49c4cbde17e0cead69bd9e55a81d5c0fafe4df
[ "Apache-2.0" ]
22
2015-06-22T07:35:04.000Z
2021-07-23T05:10:09.000Z
source/openwarpgui/openwarp/__init__.py
yuyihsiang/OpenWARP
d3f8c9c377e5dbe4f838888667f37b975b924c7c
[ "Apache-2.0" ]
9
2015-07-30T20:01:35.000Z
2020-08-28T17:29:18.000Z
source/openwarpgui/openwarp/__init__.py
NREL/OpenWARP
ca49c4cbde17e0cead69bd9e55a81d5c0fafe4df
[ "Apache-2.0" ]
13
2016-04-01T07:45:27.000Z
2021-04-06T08:33:33.000Z
# -*- coding: utf-8 -*-
"""
This is initialization module of openwarp.
"""

__author__ = "caoweiquan322"
__copyright__ = "Copyright (C) 2014 TopCoder Inc. All rights reserved."
__version__ = "1.0"

#import logging

# Create a base logger for the whole module.
#logger = logging.getLogger(__name__)
#logger.setLevel(logging.DEBUG)
#ch = logging.StreamHandler()
#ch.setLevel(logging.DEBUG)
#formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
#ch.setFormatter(formatter)
#logger.addHandler(ch)
27.25
87
0.699083
__author__ = "caoweiquan322" __copyright__ = "Copyright (C) 2014 TopCoder Inc. All rights reserved." __version__ = "1.0"
true
true
f70131fe3a88f9e91df9791720516f24afa634b9
69
py
Python
vizualize.py
Heyjuke58/frcnn-wind-turbine-detection
29311020188d3a26c8935cae710bd2c5013653ab
[ "Apache-2.0" ]
null
null
null
vizualize.py
Heyjuke58/frcnn-wind-turbine-detection
29311020188d3a26c8935cae710bd2c5013653ab
[ "Apache-2.0" ]
null
null
null
vizualize.py
Heyjuke58/frcnn-wind-turbine-detection
29311020188d3a26c8935cae710bd2c5013653ab
[ "Apache-2.0" ]
null
null
null
# https://github.com/albermax/innvestigate
# http://heatmapping.org/
34.5
43
0.753623
true
true
f70133067d6e6cc538d38dd7974efe692aa5fd65
1,920
py
Python
phase2_recognition_only/architectures/vgg.py
Manish-rai21bit/deep_learning_for_camera_trap_images
f9d9fd50824ece4743b39d5136f67235871cc0ef
[ "MIT" ]
85
2018-03-12T16:55:18.000Z
2022-03-29T12:41:46.000Z
phase2_recognition_only/architectures/vgg.py
Manish-rai21bit/deep_learning_for_camera_trap_images
f9d9fd50824ece4743b39d5136f67235871cc0ef
[ "MIT" ]
11
2018-06-11T09:29:46.000Z
2022-01-16T20:18:19.000Z
phase2_recognition_only/architectures/vgg.py
Manish-rai21bit/deep_learning_for_camera_trap_images
f9d9fd50824ece4743b39d5136f67235871cc0ef
[ "MIT" ]
46
2018-05-26T02:38:22.000Z
2022-03-20T17:37:35.000Z
import tensorflow as tf
import common


def inference(x, num_output, wd, dropout_rate, is_training, transfer_mode= False, model_type= 'A'):
  # Create tables describing VGG configurations A, B, D, E
  if model_type == 'A':
    config = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
  elif model_type == 'B':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
  elif model_type == 'D':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']
  elif model_type == 'E':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M']
  else:
    print('Unknown model type: ' + model_type + ' | Please specify a modelType A or B or D or E')

  network= x

  for k,v in enumerate(config):
    if v == 'M':
      network= common.maxPool(network, 2, 2)
    else:
      with tf.variable_scope('conv'+str(k)):
        network = common.spatialConvolution(network, 3, 1, v, wd= wd)
        network = tf.nn.relu(network)

  network= common.flatten(network)

  with tf.variable_scope('fc1'):
    network = common.fullyConnected(network, 4096, wd= wd)
    network = tf.nn.relu(network)
    network = common.batchNormalization(network, is_training= is_training)
    network = tf.nn.dropout(network, dropout_rate)

  with tf.variable_scope('fc2'):
    network = common.fullyConnected(network, 4096, wd= wd)
    network = tf.nn.relu(network)
    network = common.batchNormalization(network, is_training= is_training)
    network = tf.nn.dropout(network, dropout_rate)

  if not transfer_mode:
    with tf.variable_scope('output'):
      network = common.fullyConnected(network, num_output, wd= wd)
  else:
    with tf.variable_scope('transfer_output'):
      network = common.fullyConnected(network, num_output, wd= wd)

  return network
40.851064
118
0.627604
import tensorflow as tf
import common


def inference(x, num_output, wd, dropout_rate, is_training, transfer_mode= False, model_type= 'A'):
  if model_type == 'A':
    config = [64, 'M', 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
  elif model_type == 'B':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 'M', 512, 512, 'M', 512, 512, 'M']
  elif model_type == 'D':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'M', 512, 512, 512, 'M', 512, 512, 512, 'M']
  elif model_type == 'E':
    config = [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 256, 'M', 512, 512, 512, 512, 'M', 512, 512, 512, 512, 'M']
  else:
    print('Unknown model type: ' + model_type + ' | Please specify a modelType A or B or D or E')

  network= x

  for k,v in enumerate(config):
    if v == 'M':
      network= common.maxPool(network, 2, 2)
    else:
      with tf.variable_scope('conv'+str(k)):
        network = common.spatialConvolution(network, 3, 1, v, wd= wd)
        network = tf.nn.relu(network)

  network= common.flatten(network)

  with tf.variable_scope('fc1'):
    network = common.fullyConnected(network, 4096, wd= wd)
    network = tf.nn.relu(network)
    network = common.batchNormalization(network, is_training= is_training)
    network = tf.nn.dropout(network, dropout_rate)

  with tf.variable_scope('fc2'):
    network = common.fullyConnected(network, 4096, wd= wd)
    network = tf.nn.relu(network)
    network = common.batchNormalization(network, is_training= is_training)
    network = tf.nn.dropout(network, dropout_rate)

  if not transfer_mode:
    with tf.variable_scope('output'):
      network = common.fullyConnected(network, num_output, wd= wd)
  else:
    with tf.variable_scope('transfer_output'):
      network = common.fullyConnected(network, num_output, wd= wd)

  return network
true
true
f701331f418b271808b257dda1cf537ba3ca9082
1,887
py
Python
utils/mix-mono-wav-scp.py
texpomru13/espnet
7ef005e832e2fb033f356c16f54e0f08762fb4b0
[ "Apache-2.0" ]
5,053
2017-12-13T06:21:41.000Z
2022-03-31T13:38:29.000Z
utils/mix-mono-wav-scp.py
texpomru13/espnet
7ef005e832e2fb033f356c16f54e0f08762fb4b0
[ "Apache-2.0" ]
3,666
2017-12-14T05:58:50.000Z
2022-03-31T22:11:49.000Z
utils/mix-mono-wav-scp.py
texpomru13/espnet
7ef005e832e2fb033f356c16f54e0f08762fb4b0
[ "Apache-2.0" ]
1,709
2017-12-13T01:02:42.000Z
2022-03-31T11:57:45.000Z
#!/usr/bin/env python3
import argparse
import io
import sys

PY2 = sys.version_info[0] == 2

if PY2:
    from itertools import izip_longest as zip_longest
else:
    from itertools import zip_longest


def get_parser():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="Mixing wav.scp files into a multi-channel wav.scp "
        "using sox.",
    )
    parser.add_argument("scp", type=str, nargs="+", help="Give wav.scp")
    parser.add_argument(
        "out",
        nargs="?",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="The output filename. " "If omitted, then output to sys.stdout",
    )
    return parser


def main():
    parser = get_parser()
    args = parser.parse_args()

    fscps = [io.open(scp, "r", encoding="utf-8") for scp in args.scp]

    for linenum, lines in enumerate(zip_longest(*fscps)):
        keys = []
        wavs = []
        for line, scp in zip(lines, args.scp):
            if line is None:
                raise RuntimeError("Numbers of line mismatch")

            sps = line.split(" ", 1)
            if len(sps) != 2:
                raise RuntimeError(
                    'Invalid line is found: {}, line {}: "{}" '.format(
                        scp, linenum, line
                    )
                )

            key, wav = sps
            keys.append(key)
            wavs.append(wav.strip())

        if not all(k == keys[0] for k in keys):
            raise RuntimeError(
                "The ids mismatch. Hint; the input files must be "
                "sorted and must have same ids: {}".format(keys)
            )

        args.out.write(
            "{} sox -M {} -c {} -t wav - |\n".format(
                keys[0], " ".join("{}".format(w) for w in wavs), len(fscps)
            )
        )


if __name__ == "__main__":
    main()
27.347826
86
0.532061
import argparse
import io
import sys

PY2 = sys.version_info[0] == 2

if PY2:
    from itertools import izip_longest as zip_longest
else:
    from itertools import zip_longest


def get_parser():
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description="Mixing wav.scp files into a multi-channel wav.scp "
        "using sox.",
    )
    parser.add_argument("scp", type=str, nargs="+", help="Give wav.scp")
    parser.add_argument(
        "out",
        nargs="?",
        type=argparse.FileType("w"),
        default=sys.stdout,
        help="The output filename. " "If omitted, then output to sys.stdout",
    )
    return parser


def main():
    parser = get_parser()
    args = parser.parse_args()

    fscps = [io.open(scp, "r", encoding="utf-8") for scp in args.scp]

    for linenum, lines in enumerate(zip_longest(*fscps)):
        keys = []
        wavs = []
        for line, scp in zip(lines, args.scp):
            if line is None:
                raise RuntimeError("Numbers of line mismatch")

            sps = line.split(" ", 1)
            if len(sps) != 2:
                raise RuntimeError(
                    'Invalid line is found: {}, line {}: "{}" '.format(
                        scp, linenum, line
                    )
                )

            key, wav = sps
            keys.append(key)
            wavs.append(wav.strip())

        if not all(k == keys[0] for k in keys):
            raise RuntimeError(
                "The ids mismatch. Hint; the input files must be "
                "sorted and must have same ids: {}".format(keys)
            )

        args.out.write(
            "{} sox -M {} -c {} -t wav - |\n".format(
                keys[0], " ".join("{}".format(w) for w in wavs), len(fscps)
            )
        )


if __name__ == "__main__":
    main()
true
true
f70133a40efab14cc0b7890e3dea48a3baaccc21
728
py
Python
kmeans/fish.py
innovator-zero/Python
f776eb081c6688c2f5a98b0050b33582c1769391
[ "Apache-2.0" ]
null
null
null
kmeans/fish.py
innovator-zero/Python
f776eb081c6688c2f5a98b0050b33582c1769391
[ "Apache-2.0" ]
3
2020-03-09T03:40:53.000Z
2020-11-12T15:36:03.000Z
kmeans/fish.py
innovator-zero/Python
f776eb081c6688c2f5a98b0050b33582c1769391
[ "Apache-2.0" ]
null
null
null
import numpy as np
import random
import matplotlib.pyplot as plt

points=np.loadtxt('points.txt')
herring_r = np.loadtxt('distribution.txt')

herring=np.zeros((802,350))

for i in range(350):
    for j in range(802):
        herring[j,349-i]=herring_r[i,j]

# s=np.zeros(10)
#
# for i in range(10):
#     x=int(round(points[i,0]))-1
#     y=int(round(points[i,1]))
#
#     for xx in range(x-11,x+12):
#         for yy in range(y-11,y+12):
#             if herring[xx,yy]>0:
#                 s[i]+=herring[xx,yy]
#
# f = open('fish_count.txt', 'w')
# for i in range(10):
#     f.write(str(s[i])+'\n')
# f.close()

s=0
for i in range(802):
    for j in range(350):
        if herring[i,j]>0:
            s+=herring[i,j]

print(s)
20.222222
42
0.56456
import numpy as np
import random
import matplotlib.pyplot as plt

points=np.loadtxt('points.txt')
herring_r = np.loadtxt('distribution.txt')

herring=np.zeros((802,350))

for i in range(350):
    for j in range(802):
        herring[j,349-i]=herring_r[i,j]

s=0
for i in range(802):
    for j in range(350):
        if herring[i,j]>0:
            s+=herring[i,j]

print(s)
true
true
f7013521dadc939389f624dadd24f24188547bb7
1,702
py
Python
general/chainerrl/baselines/branched_action_values.py
marioyc/baselines
587e66cbc409dcb7b6def0c2f1c444a548375caa
[ "MIT" ]
127
2019-08-06T13:56:53.000Z
2022-02-21T08:48:15.000Z
general/chainerrl/baselines/branched_action_values.py
marioyc/baselines
587e66cbc409dcb7b6def0c2f1c444a548375caa
[ "MIT" ]
12
2019-08-03T04:40:33.000Z
2020-08-31T06:37:31.000Z
general/chainerrl/baselines/branched_action_values.py
marioyc/baselines
587e66cbc409dcb7b6def0c2f1c444a548375caa
[ "MIT" ]
33
2019-08-06T05:06:07.000Z
2022-01-20T18:27:14.000Z
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import *  # NOQA
from future import standard_library
standard_library.install_aliases()  # NOQA

from cached_property import cached_property

from chainer import functions as F

from chainerrl.action_value import ActionValue


class BranchedActionValue(ActionValue):
    """Q-function output for a branched action space.

    Args:
        branches (list):
            Each element of the list is a Q-function for an action dimension
    """

    def __init__(self, branches, q_values_formatter=lambda x: x):
        self.branches = branches
        self.q_values_formatter = q_values_formatter

    @cached_property
    def greedy_actions(self):
        actions = []
        for branch in self.branches:
            actions.append(branch.q_values.array.argmax(axis=1).reshape(-1, 1))

        return F.hstack(actions)

    @cached_property
    def max(self):
        chosen_q_values = []
        for branch in self.branches:
            chosen_q_values.append(branch.max.reshape(-1, 1))

        return F.hstack(chosen_q_values)

    def evaluate_actions(self, actions):
        branch_q_values = []
        for i, branch in enumerate(self.branches):
            branch_actions = actions[:, i]
            branch_q_values.append(branch.evaluate_actions(
                branch_actions).reshape(-1, 1))

        return F.hstack(branch_q_values)

    @property
    def params(self):
        branch_params = []
        for branch in self.branches:
            branch_params.extend(list(branch.params))

        return tuple(branch_params)
27.451613
79
0.682139
from __future__ import unicode_literals from __future__ import print_function from __future__ import division from __future__ import absolute_import from builtins import * from future import standard_library standard_library.install_aliases() from cached_property import cached_property from chainer import functions as F from chainerrl.action_value import ActionValue class BranchedActionValue(ActionValue): def __init__(self, branches, q_values_formatter=lambda x: x): self.branches = branches self.q_values_formatter = q_values_formatter @cached_property def greedy_actions(self): actions = [] for branch in self.branches: actions.append(branch.q_values.array.argmax(axis=1).reshape(-1, 1)) return F.hstack(actions) @cached_property def max(self): chosen_q_values = [] for branch in self.branches: chosen_q_values.append(branch.max.reshape(-1, 1)) return F.hstack(chosen_q_values) def evaluate_actions(self, actions): branch_q_values = [] for i, branch in enumerate(self.branches): branch_actions = actions[:, i] branch_q_values.append(branch.evaluate_actions( branch_actions).reshape(-1, 1)) return F.hstack(branch_q_values) @property def params(self): branch_params = [] for branch in self.branches: branch_params.extend(list(branch.params)) return tuple(branch_params)
true
true
f7013540106d82345f673369326f21c8329a90c1
2,681
py
Python
vlasisku/models.py
lojban/vlasisku
a1f69fd12f206785a4635db2aa839394844250a0
[ "Unlicense" ]
17
2015-02-13T00:30:46.000Z
2022-02-20T19:54:35.000Z
vlasisku/models.py
lojban/vlasisku
a1f69fd12f206785a4635db2aa839394844250a0
[ "Unlicense" ]
16
2015-02-15T05:18:37.000Z
2021-10-08T06:03:37.000Z
vlasisku/models.py
lojban/vlasisku
a1f69fd12f206785a4635db2aa839394844250a0
[ "Unlicense" ]
6
2015-10-10T19:38:34.000Z
2020-05-30T04:58:13.000Z
from vlasisku.utils import compound2affixes


class Entry(object):
    """Container for jbovlaste entry data."""

    #: The word (or compound) this entry describes.
    word = None

    #: The type of the word, such as ``'gismu'``.
    type = None

    #: A list of three-letter affix forms for the word.
    affixes = None

    #: A list of affixes including four and five-letter versions.
    searchaffixes = None

    #: The grammatical class if the word is a particle.
    grammarclass = None

    #: The grammatical class of this words terminator, if any.
    terminator = None

    #: A list of grammatical classes this word terminates, for terminators.
    terminates = None

    #: A list of two-tuples such as ``('<chapter>.<section>', 'http://...')``.
    cll = None

    #: HTML for the entry definition, such as a place structure.
    definition = None

    #: HTML for notes about the entry.
    notes = None

    #: Plain text definition.
    textdefinition = None

    #: Plain text notes.
    textnotes = None

    #: The :class:`~vlasisku.database.Root` instance this entry is in.
    db = None

    # We need new lists for every instance.
    def __init__(self, db):
        self.affixes = []
        self.searchaffixes = []
        self.terminates = []
        self.cll = []
        self.db = db

    def __str__(self):
        return self.word

    def __repr__(self):
        return '<Entry %s>' % self.word

    def components(self):
        """Build HTML that links the affixes in a compound
        to their corresponding words, with definitions in the link tooltips.
        """
        if self.type == 'lujvo':
            components = ''
            for a in compound2affixes(self.word):
                if len(a) == 1:
                    components += a
                else:
                    word = [e for e in self.db.entries.values()
                            if a in e.searchaffixes]
                    if word:
                        components += '<a href="%s" ' % word[0]
                        components += 'title="<strong>%s:</strong> ' % word[0]
                        components += '%s">%s</a>' % (word[0].definition, a)
                    else:
                        components += a
            return components


class Gloss(object):
    """Container for jbovlaste gloss data."""

    #: The actual gloss word.
    gloss = None

    #: The :class:`Entry` this glosses to.
    entry = None

    #: The sense in which this gloss word relates to the entry, or ``None``.
    sense = None

    #: The specific place of the entry this glosses to, if any.
    place = None

    def __str__(self):
        return self.entry.word
27.080808
78
0.560239
from vlasisku.utils import compound2affixes class Entry(object): word = None type = None affixes = None searchaffixes = None grammarclass = None terminator = None terminates = None cll = None definition = None notes = None textdefinition = None textnotes = None db = None def __init__(self, db): self.affixes = [] self.searchaffixes = [] self.terminates = [] self.cll = [] self.db = db def __str__(self): return self.word def __repr__(self): return '<Entry %s>' % self.word def components(self): if self.type == 'lujvo': components = '' for a in compound2affixes(self.word): if len(a) == 1: components += a else: word = [e for e in self.db.entries.values() if a in e.searchaffixes] if word: components += '<a href="%s" ' % word[0] components += 'title="<strong>%s:</strong> ' % word[0] components += '%s">%s</a>' % (word[0].definition, a) else: components += a return components class Gloss(object): gloss = None entry = None sense = None place = None def __str__(self): return self.entry.word
true
true
f70135f112f3979c1fcdbeb445078fc5e5ba0002
1,398
py
Python
acregnet/train_acregnet.py
luoyi1hao/ACRN_Chest_X-ray_IA
b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a
[ "MIT" ]
1
2021-09-23T10:37:53.000Z
2021-09-23T10:37:53.000Z
acregnet/train_acregnet.py
luoyi1hao/ACRN_Chest_X-ray_IA
b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a
[ "MIT" ]
null
null
null
acregnet/train_acregnet.py
luoyi1hao/ACRN_Chest_X-ray_IA
b2ecaf88e6b1bb59101fd2d611bf9d1e6716367a
[ "MIT" ]
null
null
null
from data import DataHandler
from models import ACRegNet
import tensorflow as tf
from utils import get_random_batch, read_config_file, create_dir


RUN_IN_GPU = False


def train_acregnet_model(config):
    tf.reset_default_graph()
    tf_config = tf.ConfigProto()

    if RUN_IN_GPU:
        tf_config.gpu_options.allow_growth = True

    sess = tf.Session(config=tf_config)

    train_ims, _ = DataHandler.load_images(config['train_ims_file'])
    train_lbs, _ = DataHandler.load_labels(config['train_lbs_file'])
    print('Loading training data...done')

    acregnet = ACRegNet(sess, config, 'ACRegNet', is_train=True)
    print('Building AC-RegNet model...done')

    print('Training...')
    for i in range(config['iterations']):
        batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y = get_random_batch(
            train_ims, config['batch_size'], train_lbs)
        cur_loss = acregnet.fit(
            batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y)
        print('Iteration {:>8d}/{}: Loss: {}'.format(
            i + 1, config['iterations'], cur_loss))

    acregnet.save(config['ckpt_dir'])
    print('Saving current AC-RegNet model...done')
    print('Training...done')

    tf.reset_default_graph()
    sess.close()


if __name__ == "__main__":
    config = read_config_file('./config/JSRT/ACRegNet.cfg')
    create_dir(config['ckpt_dir'])
    train_acregnet_model(config)
29.125
78
0.690272
from data import DataHandler from models import ACRegNet import tensorflow as tf from utils import get_random_batch, read_config_file, create_dir RUN_IN_GPU = False def train_acregnet_model(config): tf.reset_default_graph() tf_config = tf.ConfigProto() if RUN_IN_GPU: tf_config.gpu_options.allow_growth = True sess = tf.Session(config=tf_config) train_ims, _ = DataHandler.load_images(config['train_ims_file']) train_lbs, _ = DataHandler.load_labels(config['train_lbs_file']) print('Loading training data...done') acregnet = ACRegNet(sess, config, 'ACRegNet', is_train=True) print('Building AC-RegNet model...done') print('Training...') for i in range(config['iterations']): batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y = get_random_batch( train_ims, config['batch_size'], train_lbs) cur_loss = acregnet.fit( batch_ims_x, batch_ims_y, batch_lbs_x, batch_lbs_y) print('Iteration {:>8d}/{}: Loss: {}'.format( i + 1, config['iterations'], cur_loss)) acregnet.save(config['ckpt_dir']) print('Saving current AC-RegNet model...done') print('Training...done') tf.reset_default_graph() sess.close() if __name__ == "__main__": config = read_config_file('./config/JSRT/ACRegNet.cfg') create_dir(config['ckpt_dir']) train_acregnet_model(config)
true
true
f7013784c179db6e1336c8feef0d98ad0bd7c810
4,346
py
Python
test_gr_nlp_toolkit/test_pipeline/test_pipeline.py
nlpaueb/gr-nlp-toolkit
fce7985a0e51d0924350d161626a11fa3f2e9e4e
[ "Apache-2.0" ]
16
2021-09-23T14:01:32.000Z
2022-03-21T20:13:40.000Z
test_gr_nlp_toolkit/test_pipeline/test_pipeline.py
nlpaueb/gr-nlp-toolkit
fce7985a0e51d0924350d161626a11fa3f2e9e4e
[ "Apache-2.0" ]
3
2021-07-15T12:21:53.000Z
2022-02-24T20:59:49.000Z
test_gr_nlp_toolkit/test_pipeline/test_pipeline.py
nlpaueb/gr-nlp-toolkit
fce7985a0e51d0924350d161626a11fa3f2e9e4e
[ "Apache-2.0" ]
2
2022-02-24T19:20:55.000Z
2022-03-17T01:08:11.000Z
import unittest

from gr_nlp_toolkit.labels.dp_labels import dp_labels
from gr_nlp_toolkit.labels.ner_labels import ner_labels
from gr_nlp_toolkit.labels.pos_labels import pos_labels, pos_properties
from gr_nlp_toolkit.pipeline.pipeline import Pipeline


class TestPipeline(unittest.TestCase):

    def test_using_all_processors(self):
        nlp = Pipeline('dp,pos,ner')

        sentences = ["Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021",
                     "Το ποιηματάκι το έγραψε ο διάσημος ποιητής, Νίκος Νικολαϊδης"]

        for sent in sentences:
            doc = nlp(sent)

            for token in doc.tokens:
                print(token.text, token.ner, token.upos, token.feats, token.head, token.deprel)
                self.assertIsNotNone(token.ner)
                self.assertTrue(token.ner in ner_labels)

                self.assertIsNotNone(token.head)
                self.assertIsNotNone(token.deprel)
                # We have to add plus one, because the cls token is removed
                self.assertTrue(token.head in range(0, len(doc.tokens) + 1))
                self.assertTrue(token.deprel in dp_labels)

                self.assertIsNotNone(token.upos)
                self.assertTrue(token.upos in pos_labels['upos'])
                self.assertIsNotNone(token.feats)
                self.assertEqual(len(list(token.feats.keys())), len(pos_properties[token.upos]))
                for feat, value in token.feats.items():
                    self.assertTrue(feat in pos_properties[token.upos])
                    self.assertTrue(value in pos_labels[feat])

                print(token.text, token.ner, token.upos, token.feats, token.head, token.deprel)
                self.assertIsNotNone(token.ner)
                self.assertTrue(token.ner in ner_labels)

                self.assertIsNotNone(token.head)
                self.assertIsNotNone(token.deprel)
                # We have to add plus one, because the cls token is removed
                self.assertTrue(token.head in range(0, len(doc.tokens) + 1))
                self.assertTrue(token.deprel in dp_labels)

                self.assertIsNotNone(token.upos)
                self.assertTrue(token.upos in pos_labels['upos'])

    def test_annotations_are_same_with_multiple_configurations(self):
        nlp = Pipeline('dp,pos,ner')
        doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")

        deprels_preds = []
        upos_preds = []
        ner_preds = []

        for token in doc.tokens:
            deprels_preds.append(token.deprel)
            upos_preds.append(token.upos)
            ner_preds.append(token.ner)

        nlp = Pipeline('dp')
        doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")

        new_deprels_preds = []
        for token in doc.tokens:
            new_deprels_preds.append(token.deprel)

        nlp = Pipeline('pos')
        doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")

        new_upos_preds =[]
        for token in doc.tokens:
            new_upos_preds.append(token.upos)

        nlp = Pipeline('ner')
        doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")

        new_ner_preds =[]
        for token in doc.tokens:
            new_ner_preds.append(token.ner)

        self.assertEqual(new_deprels_preds, deprels_preds)
        self.assertEqual(new_upos_preds, upos_preds)
        self.assertEqual(new_ner_preds, ner_preds)

    def test_using_only_one_processor(self):
        nlp = Pipeline('ner')
        doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021")

        for token in doc.tokens:
            self.assertIsNotNone(token.ner)
            self.assertTrue(token.ner in ner_labels)

            self.assertIsNone(token.head)
            self.assertIsNone(token.deprel)
            self.assertFalse(token.head in range(0, len(doc.tokens)))
            self.assertFalse(token.deprel in dp_labels)

            self.assertIsNone(token.upos)
            self.assertFalse(token.upos in pos_labels['upos'])
            for feat, value in token.feats.items():
                self.assertFalse(feat in pos_properties[token.upos])
                self.assertFalse(value in pos_labels[feat])


if __name__ == '__main__':
    unittest.main()
41
99
0.61942
import unittest from gr_nlp_toolkit.labels.dp_labels import dp_labels from gr_nlp_toolkit.labels.ner_labels import ner_labels from gr_nlp_toolkit.labels.pos_labels import pos_labels, pos_properties from gr_nlp_toolkit.pipeline.pipeline import Pipeline class TestPipeline(unittest.TestCase): def test_using_all_processors(self): nlp = Pipeline('dp,pos,ner') sentences = ["Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021", "Το ποιηματάκι το έγραψε ο διάσημος ποιητής, Νίκος Νικολαϊδης"] for sent in sentences: doc = nlp(sent) for token in doc.tokens: print(token.text, token.ner, token.upos, token.feats, token.head, token.deprel) self.assertIsNotNone(token.ner) self.assertTrue(token.ner in ner_labels) self.assertIsNotNone(token.head) self.assertIsNotNone(token.deprel) self.assertTrue(token.head in range(0, len(doc.tokens) + 1)) self.assertTrue(token.deprel in dp_labels) self.assertIsNotNone(token.upos) self.assertTrue(token.upos in pos_labels['upos']) self.assertIsNotNone(token.feats) self.assertEqual(len(list(token.feats.keys())), len(pos_properties[token.upos])) for feat, value in token.feats.items(): self.assertTrue(feat in pos_properties[token.upos]) self.assertTrue(value in pos_labels[feat]) print(token.text, token.ner, token.upos, token.feats, token.head, token.deprel) self.assertIsNotNone(token.ner) self.assertTrue(token.ner in ner_labels) self.assertIsNotNone(token.head) self.assertIsNotNone(token.deprel) self.assertTrue(token.head in range(0, len(doc.tokens) + 1)) self.assertTrue(token.deprel in dp_labels) self.assertIsNotNone(token.upos) self.assertTrue(token.upos in pos_labels['upos']) def test_annotations_are_same_with_multiple_configurations(self): nlp = Pipeline('dp,pos,ner') doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021") deprels_preds = [] upos_preds = [] ner_preds = [] for token in doc.tokens: deprels_preds.append(token.deprel) upos_preds.append(token.upos) ner_preds.append(token.ner) nlp = Pipeline('dp') doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021") new_deprels_preds = [] for token in doc.tokens: new_deprels_preds.append(token.deprel) nlp = Pipeline('pos') doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021") new_upos_preds =[] for token in doc.tokens: new_upos_preds.append(token.upos) nlp = Pipeline('ner') doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021") new_ner_preds =[] for token in doc.tokens: new_ner_preds.append(token.ner) self.assertEqual(new_deprels_preds, deprels_preds) self.assertEqual(new_upos_preds, upos_preds) self.assertEqual(new_ner_preds, ner_preds) def test_using_only_one_processor(self): nlp = Pipeline('ner') doc = nlp("Η Ιταλία κέρδισε την Αγγλία στον τελικό του Euro το 2021") for token in doc.tokens: self.assertIsNotNone(token.ner) self.assertTrue(token.ner in ner_labels) self.assertIsNone(token.head) self.assertIsNone(token.deprel) self.assertFalse(token.head in range(0, len(doc.tokens))) self.assertFalse(token.deprel in dp_labels) self.assertIsNone(token.upos) self.assertFalse(token.upos in pos_labels['upos']) for feat, value in token.feats.items(): self.assertFalse(feat in pos_properties[token.upos]) self.assertFalse(value in pos_labels[feat]) if __name__ == '__main__': unittest.main()
true
true
f701396b8b37e360dab733759a1eec188404061e
3,447
py
Python
ucsmsdk/mometa/initiator/InitiatorFcInitiatorEp.py
thinkitdata/ucsmsdk
da6599e1dbc1207a30eabe548a7e5791af5f476b
[ "Apache-2.0" ]
null
null
null
ucsmsdk/mometa/initiator/InitiatorFcInitiatorEp.py
thinkitdata/ucsmsdk
da6599e1dbc1207a30eabe548a7e5791af5f476b
[ "Apache-2.0" ]
null
null
null
ucsmsdk/mometa/initiator/InitiatorFcInitiatorEp.py
thinkitdata/ucsmsdk
da6599e1dbc1207a30eabe548a7e5791af5f476b
[ "Apache-2.0" ]
null
null
null
"""This module contains the general information for InitiatorFcInitiatorEp ManagedObject.""" from ...ucsmo import ManagedObject from ...ucscoremeta import MoPropertyMeta, MoMeta from ...ucsmeta import VersionMeta class InitiatorFcInitiatorEpConsts: PREF_ALTERNATE = "alternate" PREF_PREFERRED = "preferred" PROT_DERIVED = "derived" PROT_FC = "fc" PROT_ISCSI = "iscsi" class InitiatorFcInitiatorEp(ManagedObject): """This is InitiatorFcInitiatorEp class.""" consts = InitiatorFcInitiatorEpConsts() naming_props = set([u'name']) mo_meta = MoMeta("InitiatorFcInitiatorEp", "initiatorFcInitiatorEp", "fc-ini-[name]", VersionMeta.Version211a, "InputOutput", 0x3f, [], ["read-only"], [u'initiatorGroupEp'], [u'storageEpUser'], [None]) prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "ep_dn": MoPropertyMeta("ep_dn", "epDn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []), "id": MoPropertyMeta("id", "id", "ulong", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version211a, MoPropertyMeta.NAMING, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []), "pref": MoPropertyMeta("pref", "pref", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["alternate", "preferred"], []), "prot": MoPropertyMeta("prot", "prot", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["derived", "fc", "iscsi"], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "wwpn": MoPropertyMeta("wwpn", "wwpn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, r"""(([A-Fa-f0-9][A-Fa-f0-9]:){7}[A-Fa-f0-9][A-Fa-f0-9])|0""", [], []), } prop_map = { "childAction": "child_action", "dn": "dn", "epDn": "ep_dn", "id": "id", "name": "name", "pref": "pref", "prot": "prot", "rn": "rn", "sacl": "sacl", "status": "status", "wwpn": "wwpn", } def __init__(self, parent_mo_or_dn, name, **kwargs): self._dirty_mask = 0 self.name = name self.child_action = None self.ep_dn = None self.id = None self.pref = None self.prot = None self.sacl = None self.status = None self.wwpn = None ManagedObject.__init__(self, "InitiatorFcInitiatorEp", parent_mo_or_dn, **kwargs)
53.030769
248
0.629823
from ...ucsmo import ManagedObject from ...ucscoremeta import MoPropertyMeta, MoMeta from ...ucsmeta import VersionMeta class InitiatorFcInitiatorEpConsts: PREF_ALTERNATE = "alternate" PREF_PREFERRED = "preferred" PROT_DERIVED = "derived" PROT_FC = "fc" PROT_ISCSI = "iscsi" class InitiatorFcInitiatorEp(ManagedObject): consts = InitiatorFcInitiatorEpConsts() naming_props = set([u'name']) mo_meta = MoMeta("InitiatorFcInitiatorEp", "initiatorFcInitiatorEp", "fc-ini-[name]", VersionMeta.Version211a, "InputOutput", 0x3f, [], ["read-only"], [u'initiatorGroupEp'], [u'storageEpUser'], [None]) prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version211a, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "ep_dn": MoPropertyMeta("ep_dn", "epDn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []), "id": MoPropertyMeta("id", "id", "ulong", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version211a, MoPropertyMeta.NAMING, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []), "pref": MoPropertyMeta("pref", "pref", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["alternate", "preferred"], []), "prot": MoPropertyMeta("prot", "prot", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["derived", "fc", "iscsi"], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "wwpn": MoPropertyMeta("wwpn", "wwpn", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 256, r"""(([A-Fa-f0-9][A-Fa-f0-9]:){7}[A-Fa-f0-9][A-Fa-f0-9])|0""", [], []), } prop_map = { "childAction": "child_action", "dn": "dn", "epDn": "ep_dn", "id": "id", "name": "name", "pref": "pref", "prot": "prot", "rn": "rn", "sacl": "sacl", "status": "status", "wwpn": "wwpn", } def __init__(self, parent_mo_or_dn, name, **kwargs): self._dirty_mask = 0 self.name = name self.child_action = None self.ep_dn = None self.id = None self.pref = None self.prot = None self.sacl = None self.status = None self.wwpn = None ManagedObject.__init__(self, "InitiatorFcInitiatorEp", parent_mo_or_dn, **kwargs)
true
true
f7013a927d89ff620e05d8a99d6d0cc4f8c43d01
13,583
py
Python
examples/classification_modelnet40.py
NNstorm/MinkowskiEngine
443b37a58c379b2482b5d160d9e874b356b4bf2f
[ "MIT" ]
851
2020-07-09T21:35:06.000Z
2022-03-31T14:35:57.000Z
examples/classification_modelnet40.py
NNstorm/MinkowskiEngine
443b37a58c379b2482b5d160d9e874b356b4bf2f
[ "MIT" ]
301
2020-07-09T21:51:23.000Z
2022-03-30T12:23:24.000Z
examples/classification_modelnet40.py
NNstorm/MinkowskiEngine
443b37a58c379b2482b5d160d9e874b356b4bf2f
[ "MIT" ]
151
2020-07-15T09:22:09.000Z
2022-03-23T21:32:47.000Z
# Copyright (c) 2020 NVIDIA CORPORATION. # Copyright (c) 2018-2020 Chris Choy ([email protected]). # # Permission is hereby granted, free of charge, to any person obtaining a copy of # this software and associated documentation files (the "Software"), to deal in # the Software without restriction, including without limitation the rights to # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software is furnished to do # so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # # Please cite "4D Spatio-Temporal ConvNets: Minkowski Convolutional Neural # Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part # of the code. import argparse import sklearn.metrics as metrics import numpy as np import torch import torch.nn as nn import torch.utils.data from torch.utils.data import DataLoader import torch.optim as optim import torch.nn.functional as F import MinkowskiEngine as ME from examples.pointnet import ( PointNet, MinkowskiPointNet, CoordinateTransformation, ModelNet40H5, stack_collate_fn, minkowski_collate_fn, ) from examples.common import seed_all parser = argparse.ArgumentParser() parser.add_argument("--voxel_size", type=float, default=0.05) parser.add_argument("--max_steps", type=int, default=100000) parser.add_argument("--val_freq", type=int, default=1000) parser.add_argument("--batch_size", default=32, type=int) parser.add_argument("--lr", default=1e-1, type=float) parser.add_argument("--weight_decay", type=float, default=1e-4) parser.add_argument("--num_workers", type=int, default=2) parser.add_argument("--stat_freq", type=int, default=100) parser.add_argument("--weights", type=str, default="modelnet.pth") parser.add_argument("--seed", type=int, default=777) parser.add_argument("--translation", type=float, default=0.2) parser.add_argument("--test_translation", type=float, default=0.0) parser.add_argument( "--network", type=str, choices=["pointnet", "minkpointnet", "minkfcnn", "minksplatfcnn"], default="minkfcnn", ) class MinkowskiFCNN(ME.MinkowskiNetwork): def __init__( self, in_channel, out_channel, embedding_channel=1024, channels=(32, 48, 64, 96, 128), D=3, ): ME.MinkowskiNetwork.__init__(self, D) self.network_initialization( in_channel, out_channel, channels=channels, embedding_channel=embedding_channel, kernel_size=3, D=D, ) self.weight_initialization() def get_mlp_block(self, in_channel, out_channel): return nn.Sequential( ME.MinkowskiLinear(in_channel, out_channel, bias=False), ME.MinkowskiBatchNorm(out_channel), ME.MinkowskiLeakyReLU(), ) def get_conv_block(self, in_channel, out_channel, kernel_size, stride): return nn.Sequential( ME.MinkowskiConvolution( in_channel, out_channel, kernel_size=kernel_size, stride=stride, dimension=self.D, ), ME.MinkowskiBatchNorm(out_channel), ME.MinkowskiLeakyReLU(), ) def network_initialization( self, in_channel, out_channel, channels, embedding_channel, 
kernel_size, D=3, ): self.mlp1 = self.get_mlp_block(in_channel, channels[0]) self.conv1 = self.get_conv_block( channels[0], channels[1], kernel_size=kernel_size, stride=1, ) self.conv2 = self.get_conv_block( channels[1], channels[2], kernel_size=kernel_size, stride=2, ) self.conv3 = self.get_conv_block( channels[2], channels[3], kernel_size=kernel_size, stride=2, ) self.conv4 = self.get_conv_block( channels[3], channels[4], kernel_size=kernel_size, stride=2, ) self.conv5 = nn.Sequential( self.get_conv_block( channels[1] + channels[2] + channels[3] + channels[4], embedding_channel // 4, kernel_size=3, stride=2, ), self.get_conv_block( embedding_channel // 4, embedding_channel // 2, kernel_size=3, stride=2, ), self.get_conv_block( embedding_channel // 2, embedding_channel, kernel_size=3, stride=2, ), ) self.pool = ME.MinkowskiMaxPooling(kernel_size=3, stride=2, dimension=D) self.global_max_pool = ME.MinkowskiGlobalMaxPooling() self.global_avg_pool = ME.MinkowskiGlobalAvgPooling() self.final = nn.Sequential( self.get_mlp_block(embedding_channel * 2, 512), ME.MinkowskiDropout(), self.get_mlp_block(512, 512), ME.MinkowskiLinear(512, out_channel, bias=True), ) # No, Dropout, last 256 linear, AVG_POOLING 92% def weight_initialization(self): for m in self.modules(): if isinstance(m, ME.MinkowskiConvolution): ME.utils.kaiming_normal_(m.kernel, mode="fan_out", nonlinearity="relu") if isinstance(m, ME.MinkowskiBatchNorm): nn.init.constant_(m.bn.weight, 1) nn.init.constant_(m.bn.bias, 0) def forward(self, x: ME.TensorField): x = self.mlp1(x) y = x.sparse() y = self.conv1(y) y1 = self.pool(y) y = self.conv2(y1) y2 = self.pool(y) y = self.conv3(y2) y3 = self.pool(y) y = self.conv4(y3) y4 = self.pool(y) x1 = y1.slice(x) x2 = y2.slice(x) x3 = y3.slice(x) x4 = y4.slice(x) x = ME.cat(x1, x2, x3, x4) y = self.conv5(x.sparse()) x1 = self.global_max_pool(y) x2 = self.global_avg_pool(y) return self.final(ME.cat(x1, x2)).F class GlobalMaxAvgPool(torch.nn.Module): def __init__(self): torch.nn.Module.__init__(self) self.global_max_pool = ME.MinkowskiGlobalMaxPooling() self.global_avg_pool = ME.MinkowskiGlobalAvgPooling() def forward(self, tensor): x = self.global_max_pool(tensor) y = self.global_avg_pool(tensor) return ME.cat(x, y) class MinkowskiSplatFCNN(MinkowskiFCNN): def __init__( self, in_channel, out_channel, embedding_channel=1024, channels=(32, 48, 64, 96, 128), D=3, ): MinkowskiFCNN.__init__( self, in_channel, out_channel, embedding_channel, channels, D ) def forward(self, x: ME.TensorField): x = self.mlp1(x) y = x.splat() y = self.conv1(y) y1 = self.pool(y) y = self.conv2(y1) y2 = self.pool(y) y = self.conv3(y2) y3 = self.pool(y) y = self.conv4(y3) y4 = self.pool(y) x1 = y1.interpolate(x) x2 = y2.interpolate(x) x3 = y3.interpolate(x) x4 = y4.interpolate(x) x = ME.cat(x1, x2, x3, x4) y = self.conv5(x.sparse()) x1 = self.global_max_pool(y) x2 = self.global_avg_pool(y) return self.final(ME.cat(x1, x2)).F STR2NETWORK = dict( pointnet=PointNet, minkpointnet=MinkowskiPointNet, minkfcnn=MinkowskiFCNN, minksplatfcnn=MinkowskiSplatFCNN, ) def create_input_batch(batch, is_minknet, device="cuda", quantization_size=0.05): if is_minknet: batch["coordinates"][:, 1:] = batch["coordinates"][:, 1:] / quantization_size return ME.TensorField( coordinates=batch["coordinates"], features=batch["features"], device=device, ) else: return batch["coordinates"].permute(0, 2, 1).to(device) class CoordinateTranslation: def __init__(self, translation): self.trans = translation def __call__(self, coords): if self.trans > 0: coords += 
np.random.uniform(low=-self.trans, high=self.trans, size=[1, 3]) return coords def make_data_loader(phase, is_minknet, config): assert phase in ["train", "val", "test"] is_train = phase == "train" dataset = ModelNet40H5( phase=phase, transform=CoordinateTransformation(trans=config.translation) if is_train else CoordinateTranslation(config.test_translation), data_root="modelnet40_ply_hdf5_2048", ) return DataLoader( dataset, num_workers=config.num_workers, shuffle=is_train, collate_fn=minkowski_collate_fn if is_minknet else stack_collate_fn, batch_size=config.batch_size, ) def test(net, device, config, phase="val"): is_minknet = isinstance(net, ME.MinkowskiNetwork) data_loader = make_data_loader( "test", is_minknet, config=config, ) net.eval() labels, preds = [], [] with torch.no_grad(): for batch in data_loader: input = create_input_batch( batch, is_minknet, device=device, quantization_size=config.voxel_size, ) logit = net(input) pred = torch.argmax(logit, 1) labels.append(batch["labels"].cpu().numpy()) preds.append(pred.cpu().numpy()) torch.cuda.empty_cache() return metrics.accuracy_score(np.concatenate(labels), np.concatenate(preds)) def criterion(pred, labels, smoothing=True): """Calculate cross entropy loss, apply label smoothing if needed.""" labels = labels.contiguous().view(-1) if smoothing: eps = 0.2 n_class = pred.size(1) one_hot = torch.zeros_like(pred).scatter(1, labels.view(-1, 1), 1) one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1) log_prb = F.log_softmax(pred, dim=1) loss = -(one_hot * log_prb).sum(dim=1).mean() else: loss = F.cross_entropy(pred, labels, reduction="mean") return loss def train(net, device, config): is_minknet = isinstance(net, ME.MinkowskiNetwork) optimizer = optim.SGD( net.parameters(), lr=config.lr, momentum=0.9, weight_decay=config.weight_decay, ) scheduler = optim.lr_scheduler.CosineAnnealingLR( optimizer, T_max=config.max_steps, ) print(optimizer) print(scheduler) train_iter = iter(make_data_loader("train", is_minknet, config)) best_metric = 0 net.train() for i in range(config.max_steps): optimizer.zero_grad() try: data_dict = train_iter.next() except StopIteration: train_iter = iter(make_data_loader("train", is_minknet, config)) data_dict = train_iter.next() input = create_input_batch( data_dict, is_minknet, device=device, quantization_size=config.voxel_size ) logit = net(input) loss = criterion(logit, data_dict["labels"].to(device)) loss.backward() optimizer.step() scheduler.step() torch.cuda.empty_cache() if i % config.stat_freq == 0: print(f"Iter: {i}, Loss: {loss.item():.3e}") if i % config.val_freq == 0 and i > 0: torch.save( { "state_dict": net.state_dict(), "optimizer": optimizer.state_dict(), "scheduler": scheduler.state_dict(), "curr_iter": i, }, config.weights, ) accuracy = test(net, device, config, phase="val") if best_metric < accuracy: best_metric = accuracy print(f"Validation accuracy: {accuracy}. 
Best accuracy: {best_metric}") net.train() if __name__ == "__main__": config = parser.parse_args() seed_all(config.seed) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") print("===================ModelNet40 Dataset===================") print(f"Training with translation {config.translation}") print(f"Evaluating with translation {config.test_translation}") print("=============================================\n\n") net = STR2NETWORK[config.network]( in_channel=3, out_channel=40, embedding_channel=1024 ).to(device) print("===================Network===================") print(net) print("=============================================\n\n") train(net, device, config) accuracy = test(net, device, config, phase="test") print(f"Test accuracy: {accuracy}")
30.730769
87
0.596628
# Networks", CVPR'19 (https://arxiv.org/abs/1904.08755) if you use any part # of the code. import argparse import sklearn.metrics as metrics import numpy as np import torch import torch.nn as nn import torch.utils.data from torch.utils.data import DataLoader import torch.optim as optim import torch.nn.functional as F import MinkowskiEngine as ME from examples.pointnet import ( PointNet, MinkowskiPointNet, CoordinateTransformation, ModelNet40H5, stack_collate_fn, minkowski_collate_fn, ) from examples.common import seed_all parser = argparse.ArgumentParser() parser.add_argument("--voxel_size", type=float, default=0.05) parser.add_argument("--max_steps", type=int, default=100000) parser.add_argument("--val_freq", type=int, default=1000) parser.add_argument("--batch_size", default=32, type=int) parser.add_argument("--lr", default=1e-1, type=float) parser.add_argument("--weight_decay", type=float, default=1e-4) parser.add_argument("--num_workers", type=int, default=2) parser.add_argument("--stat_freq", type=int, default=100) parser.add_argument("--weights", type=str, default="modelnet.pth") parser.add_argument("--seed", type=int, default=777) parser.add_argument("--translation", type=float, default=0.2) parser.add_argument("--test_translation", type=float, default=0.0) parser.add_argument( "--network", type=str, choices=["pointnet", "minkpointnet", "minkfcnn", "minksplatfcnn"], default="minkfcnn", ) class MinkowskiFCNN(ME.MinkowskiNetwork): def __init__( self, in_channel, out_channel, embedding_channel=1024, channels=(32, 48, 64, 96, 128), D=3, ): ME.MinkowskiNetwork.__init__(self, D) self.network_initialization( in_channel, out_channel, channels=channels, embedding_channel=embedding_channel, kernel_size=3, D=D, ) self.weight_initialization() def get_mlp_block(self, in_channel, out_channel): return nn.Sequential( ME.MinkowskiLinear(in_channel, out_channel, bias=False), ME.MinkowskiBatchNorm(out_channel), ME.MinkowskiLeakyReLU(), ) def get_conv_block(self, in_channel, out_channel, kernel_size, stride): return nn.Sequential( ME.MinkowskiConvolution( in_channel, out_channel, kernel_size=kernel_size, stride=stride, dimension=self.D, ), ME.MinkowskiBatchNorm(out_channel), ME.MinkowskiLeakyReLU(), ) def network_initialization( self, in_channel, out_channel, channels, embedding_channel, kernel_size, D=3, ): self.mlp1 = self.get_mlp_block(in_channel, channels[0]) self.conv1 = self.get_conv_block( channels[0], channels[1], kernel_size=kernel_size, stride=1, ) self.conv2 = self.get_conv_block( channels[1], channels[2], kernel_size=kernel_size, stride=2, ) self.conv3 = self.get_conv_block( channels[2], channels[3], kernel_size=kernel_size, stride=2, ) self.conv4 = self.get_conv_block( channels[3], channels[4], kernel_size=kernel_size, stride=2, ) self.conv5 = nn.Sequential( self.get_conv_block( channels[1] + channels[2] + channels[3] + channels[4], embedding_channel // 4, kernel_size=3, stride=2, ), self.get_conv_block( embedding_channel // 4, embedding_channel // 2, kernel_size=3, stride=2, ), self.get_conv_block( embedding_channel // 2, embedding_channel, kernel_size=3, stride=2, ), ) self.pool = ME.MinkowskiMaxPooling(kernel_size=3, stride=2, dimension=D) self.global_max_pool = ME.MinkowskiGlobalMaxPooling() self.global_avg_pool = ME.MinkowskiGlobalAvgPooling() self.final = nn.Sequential( self.get_mlp_block(embedding_channel * 2, 512), ME.MinkowskiDropout(), self.get_mlp_block(512, 512), ME.MinkowskiLinear(512, out_channel, bias=True), ) # No, Dropout, last 256 linear, AVG_POOLING 92% def 
weight_initialization(self): for m in self.modules(): if isinstance(m, ME.MinkowskiConvolution): ME.utils.kaiming_normal_(m.kernel, mode="fan_out", nonlinearity="relu") if isinstance(m, ME.MinkowskiBatchNorm): nn.init.constant_(m.bn.weight, 1) nn.init.constant_(m.bn.bias, 0) def forward(self, x: ME.TensorField): x = self.mlp1(x) y = x.sparse() y = self.conv1(y) y1 = self.pool(y) y = self.conv2(y1) y2 = self.pool(y) y = self.conv3(y2) y3 = self.pool(y) y = self.conv4(y3) y4 = self.pool(y) x1 = y1.slice(x) x2 = y2.slice(x) x3 = y3.slice(x) x4 = y4.slice(x) x = ME.cat(x1, x2, x3, x4) y = self.conv5(x.sparse()) x1 = self.global_max_pool(y) x2 = self.global_avg_pool(y) return self.final(ME.cat(x1, x2)).F class GlobalMaxAvgPool(torch.nn.Module): def __init__(self): torch.nn.Module.__init__(self) self.global_max_pool = ME.MinkowskiGlobalMaxPooling() self.global_avg_pool = ME.MinkowskiGlobalAvgPooling() def forward(self, tensor): x = self.global_max_pool(tensor) y = self.global_avg_pool(tensor) return ME.cat(x, y) class MinkowskiSplatFCNN(MinkowskiFCNN): def __init__( self, in_channel, out_channel, embedding_channel=1024, channels=(32, 48, 64, 96, 128), D=3, ): MinkowskiFCNN.__init__( self, in_channel, out_channel, embedding_channel, channels, D ) def forward(self, x: ME.TensorField): x = self.mlp1(x) y = x.splat() y = self.conv1(y) y1 = self.pool(y) y = self.conv2(y1) y2 = self.pool(y) y = self.conv3(y2) y3 = self.pool(y) y = self.conv4(y3) y4 = self.pool(y) x1 = y1.interpolate(x) x2 = y2.interpolate(x) x3 = y3.interpolate(x) x4 = y4.interpolate(x) x = ME.cat(x1, x2, x3, x4) y = self.conv5(x.sparse()) x1 = self.global_max_pool(y) x2 = self.global_avg_pool(y) return self.final(ME.cat(x1, x2)).F STR2NETWORK = dict( pointnet=PointNet, minkpointnet=MinkowskiPointNet, minkfcnn=MinkowskiFCNN, minksplatfcnn=MinkowskiSplatFCNN, ) def create_input_batch(batch, is_minknet, device="cuda", quantization_size=0.05): if is_minknet: batch["coordinates"][:, 1:] = batch["coordinates"][:, 1:] / quantization_size return ME.TensorField( coordinates=batch["coordinates"], features=batch["features"], device=device, ) else: return batch["coordinates"].permute(0, 2, 1).to(device) class CoordinateTranslation: def __init__(self, translation): self.trans = translation def __call__(self, coords): if self.trans > 0: coords += np.random.uniform(low=-self.trans, high=self.trans, size=[1, 3]) return coords def make_data_loader(phase, is_minknet, config): assert phase in ["train", "val", "test"] is_train = phase == "train" dataset = ModelNet40H5( phase=phase, transform=CoordinateTransformation(trans=config.translation) if is_train else CoordinateTranslation(config.test_translation), data_root="modelnet40_ply_hdf5_2048", ) return DataLoader( dataset, num_workers=config.num_workers, shuffle=is_train, collate_fn=minkowski_collate_fn if is_minknet else stack_collate_fn, batch_size=config.batch_size, ) def test(net, device, config, phase="val"): is_minknet = isinstance(net, ME.MinkowskiNetwork) data_loader = make_data_loader( "test", is_minknet, config=config, ) net.eval() labels, preds = [], [] with torch.no_grad(): for batch in data_loader: input = create_input_batch( batch, is_minknet, device=device, quantization_size=config.voxel_size, ) logit = net(input) pred = torch.argmax(logit, 1) labels.append(batch["labels"].cpu().numpy()) preds.append(pred.cpu().numpy()) torch.cuda.empty_cache() return metrics.accuracy_score(np.concatenate(labels), np.concatenate(preds)) def criterion(pred, labels, smoothing=True): labels = 
labels.contiguous().view(-1) if smoothing: eps = 0.2 n_class = pred.size(1) one_hot = torch.zeros_like(pred).scatter(1, labels.view(-1, 1), 1) one_hot = one_hot * (1 - eps) + (1 - one_hot) * eps / (n_class - 1) log_prb = F.log_softmax(pred, dim=1) loss = -(one_hot * log_prb).sum(dim=1).mean() else: loss = F.cross_entropy(pred, labels, reduction="mean") return loss def train(net, device, config): is_minknet = isinstance(net, ME.MinkowskiNetwork) optimizer = optim.SGD( net.parameters(), lr=config.lr, momentum=0.9, weight_decay=config.weight_decay, ) scheduler = optim.lr_scheduler.CosineAnnealingLR( optimizer, T_max=config.max_steps, ) print(optimizer) print(scheduler) train_iter = iter(make_data_loader("train", is_minknet, config)) best_metric = 0 net.train() for i in range(config.max_steps): optimizer.zero_grad() try: data_dict = train_iter.next() except StopIteration: train_iter = iter(make_data_loader("train", is_minknet, config)) data_dict = train_iter.next() input = create_input_batch( data_dict, is_minknet, device=device, quantization_size=config.voxel_size ) logit = net(input) loss = criterion(logit, data_dict["labels"].to(device)) loss.backward() optimizer.step() scheduler.step() torch.cuda.empty_cache() if i % config.stat_freq == 0: print(f"Iter: {i}, Loss: {loss.item():.3e}") if i % config.val_freq == 0 and i > 0: torch.save( { "state_dict": net.state_dict(), "optimizer": optimizer.state_dict(), "scheduler": scheduler.state_dict(), "curr_iter": i, }, config.weights, ) accuracy = test(net, device, config, phase="val") if best_metric < accuracy: best_metric = accuracy print(f"Validation accuracy: {accuracy}. Best accuracy: {best_metric}") net.train() if __name__ == "__main__": config = parser.parse_args() seed_all(config.seed) device = torch.device("cuda" if torch.cuda.is_available() else "cpu") print("===================ModelNet40 Dataset===================") print(f"Training with translation {config.translation}") print(f"Evaluating with translation {config.test_translation}") print("=============================================\n\n") net = STR2NETWORK[config.network]( in_channel=3, out_channel=40, embedding_channel=1024 ).to(device) print("===================Network===================") print(net) print("=============================================\n\n") train(net, device, config) accuracy = test(net, device, config, phase="test") print(f"Test accuracy: {accuracy}")
true
true
f7013b07af9877903da335d65ccea7f54b94087f
455
py
Python
computescore.py
serook/coursera_python4everybody
4886543abd77b3001dca6a16f7edb5a29474380f
[ "Apache-2.0" ]
null
null
null
computescore.py
serook/coursera_python4everybody
4886543abd77b3001dca6a16f7edb5a29474380f
[ "Apache-2.0" ]
null
null
null
computescore.py
serook/coursera_python4everybody
4886543abd77b3001dca6a16f7edb5a29474380f
[ "Apache-2.0" ]
null
null
null
def computescore(s):
    if s >= 0.9 and s <= 1.0:
        print 'grade is A'
    elif s >= 0.8 and s<=1.0 :
        print 'grade is B'
    elif s >= 0.7 and s<=1.0 :
        print 'grade is C'
    elif s >= 0.6 and s <= 1.0 :
        print 'grade is D'
    elif s >= 0.0 and s <= 0.6 :
        print 'grade is F'
    else :
        print 'ERROR'
    return s

inp=input('enter numberscore\n')
score=float(inp)
result=computescore(score)
print "we are back" , result
22.75
32
0.540659
def computescore(s):
    if s >= 0.9 and s <= 1.0:
        print 'grade is A'
    elif s >= 0.8 and s<=1.0 :
        print 'grade is B'
    elif s >= 0.7 and s<=1.0 :
        print 'grade is C'
    elif s >= 0.6 and s <= 1.0 :
        print 'grade is D'
    elif s >= 0.0 and s <= 0.6 :
        print 'grade is F'
    else :
        print 'ERROR'
    return s

inp=input('enter numberscore\n')
score=float(inp)
result=computescore(score)
print "we are back" , result
false
true
f7013b119a6edae5d60d9e763ae96f9479d984f6
781
py
Python
googlemaps/__init__.py
billyhoe22/google-maps-services-python
98c0375dcdc9d8606ca408b8e13081b03757b6bc
[ "Apache-2.0" ]
null
null
null
googlemaps/__init__.py
billyhoe22/google-maps-services-python
98c0375dcdc9d8606ca408b8e13081b03757b6bc
[ "Apache-2.0" ]
null
null
null
googlemaps/__init__.py
billyhoe22/google-maps-services-python
98c0375dcdc9d8606ca408b8e13081b03757b6bc
[ "Apache-2.0" ]
null
null
null
#
# Copyright 2014 Google Inc. All rights reserved.
#
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#

__version__ = "2.4.5-dev"

from googlemaps.client import Client
import googlemaps.exceptions

# Allow sphinx to pick up these symbols for the documentation.
__all__ = ["Client"]
31.24
79
0.759283
__version__ = "2.4.5-dev"

from googlemaps.client import Client
import googlemaps.exceptions


__all__ = ["Client"]
true
true
f7013b41ca99a41d0c49aaab4c74a26c3e55845b
1,111
py
Python
crimsobot/models/wordle_results.py
h-anjru/crimsoBOT
8e678a6408ae99e740abfc8ab337543c666c1fb1
[ "MIT" ]
4
2019-12-26T11:09:30.000Z
2020-02-17T23:40:04.000Z
crimsobot/models/wordle_results.py
crimso4547/crimsoBOT
2867eeaff2df8b693daee74d644733c446f8156e
[ "MIT" ]
5
2019-11-19T01:24:59.000Z
2020-02-09T18:54:45.000Z
crimsobot/models/wordle_results.py
h-anjru/crimsoBOT
8e678a6408ae99e740abfc8ab337543c666c1fb1
[ "MIT" ]
2
2019-12-14T05:42:03.000Z
2020-01-11T05:49:30.000Z
from typing import Any

from tortoise import fields
from tortoise.models import Model

from crimsobot.models import DiscordUser
from crimsobot.models.user import User


class WordleResults(Model):
    uuid = fields.UUIDField(pk=True)
    name = fields.TextField(default='wordle result')
    user = fields.ForeignKeyField('models.User', related_name='wordle_results', index=True)
    guesses = fields.IntField()  # guesses to solve word (0 for quit)
    word = fields.TextField()  # word guessed
    created_at = fields.DatetimeField(null=True, auto_now_add=True)

    @classmethod
    async def create_result(cls, discord_user: DiscordUser, guesses: int, word: str) -> None:
        user = await User.get_by_discord_user(discord_user)

        result = WordleResults(user=user, guesses=guesses, word=word)

        await result.save()

    @classmethod
    async def fetch_all_by_user(cls, discord_user: DiscordUser) -> Any:
        user = await User.get_by_discord_user(discord_user)

        stat = await WordleResults.filter(user=user)

        return stat

    class Meta:
        table = 'wordle_results'
30.027027
93
0.717372
from typing import Any from tortoise import fields from tortoise.models import Model from crimsobot.models import DiscordUser from crimsobot.models.user import User class WordleResults(Model): uuid = fields.UUIDField(pk=True) name = fields.TextField(default='wordle result') user = fields.ForeignKeyField('models.User', related_name='wordle_results', index=True) guesses = fields.IntField() word = fields.TextField() created_at = fields.DatetimeField(null=True, auto_now_add=True) @classmethod async def create_result(cls, discord_user: DiscordUser, guesses: int, word: str) -> None: user = await User.get_by_discord_user(discord_user) result = WordleResults(user=user, guesses=guesses, word=word) await result.save() @classmethod async def fetch_all_by_user(cls, discord_user: DiscordUser) -> Any: user = await User.get_by_discord_user(discord_user) stat = await WordleResults.filter(user=user) return stat class Meta: table = 'wordle_results'
true
true
f7013bc44a2ffbfdf74f6e376e79f89f9dcbc2c0
34,836
py
Python
tests/test_feedexport.py
KevvKo/scrapy
3d027fb578532d504b3dbfaa77a06c3560f85d3c
[ "BSD-3-Clause" ]
1
2021-07-25T19:45:24.000Z
2021-07-25T19:45:24.000Z
tests/test_feedexport.py
zhangjinrong/scrapy
3d027fb578532d504b3dbfaa77a06c3560f85d3c
[ "BSD-3-Clause" ]
null
null
null
tests/test_feedexport.py
zhangjinrong/scrapy
3d027fb578532d504b3dbfaa77a06c3560f85d3c
[ "BSD-3-Clause" ]
null
null
null
import csv import json import os import random import shutil import string import tempfile import warnings from io import BytesIO from logging import getLogger from pathlib import Path from string import ascii_letters, digits from unittest import mock from urllib.parse import urljoin, urlparse, quote from urllib.request import pathname2url import lxml.etree from testfixtures import LogCapture from twisted.internet import defer from twisted.trial import unittest from w3lib.url import file_uri_to_path, path_to_file_uri from zope.interface import implementer from zope.interface.verify import verifyObject import scrapy from scrapy.crawler import CrawlerRunner from scrapy.exporters import CsvItemExporter from scrapy.extensions.feedexport import (BlockingFeedStorage, FileFeedStorage, FTPFeedStorage, IFeedStorage, S3FeedStorage, StdoutFeedStorage) from scrapy.settings import Settings from scrapy.utils.python import to_unicode from scrapy.utils.test import assert_aws_environ, get_crawler, get_s3_content_and_delete from tests.mockserver import MockServer class FileFeedStorageTest(unittest.TestCase): def test_store_file_uri(self): path = os.path.abspath(self.mktemp()) uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_file_uri_makedirs(self): path = os.path.abspath(self.mktemp()) path = os.path.join(path, 'more', 'paths', 'file.txt') uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_direct_path(self): path = os.path.abspath(self.mktemp()) return self._assert_stores(FileFeedStorage(path), path) def test_store_direct_path_relative(self): path = self.mktemp() return self._assert_stores(FileFeedStorage(path), path) def test_interface(self): path = self.mktemp() st = FileFeedStorage(path) verifyObject(IFeedStorage, st) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = scrapy.Spider("default") file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) try: with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"content") finally: os.unlink(path) class FTPFeedStorageTest(unittest.TestCase): def get_test_spider(self, settings=None): class TestSpider(scrapy.Spider): name = 'test_spider' crawler = get_crawler(settings_dict=settings) spider = TestSpider.from_crawler(crawler) return spider def test_store(self): uri = os.environ.get('FEEDTEST_FTP_URI') path = os.environ.get('FEEDTEST_FTP_PATH') if not (uri and path): raise unittest.SkipTest("No FTP server available for testing") st = FTPFeedStorage(uri) verifyObject(IFeedStorage, st) return self._assert_stores(st, path) def test_store_active_mode(self): uri = os.environ.get('FEEDTEST_FTP_URI') path = os.environ.get('FEEDTEST_FTP_PATH') if not (uri and path): raise unittest.SkipTest("No FTP server available for testing") use_active_mode = {'FEED_STORAGE_FTP_ACTIVE': True} crawler = get_crawler(settings_dict=use_active_mode) st = FTPFeedStorage.from_crawler(crawler, uri) verifyObject(IFeedStorage, st) return self._assert_stores(st, path) def test_uri_auth_quote(self): # RFC3986: 3.2.1. 
User Information pw_quoted = quote(string.punctuation, safe='') st = FTPFeedStorage('ftp://foo:%[email protected]/some_path' % pw_quoted) self.assertEqual(st.password, string.punctuation) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = self.get_test_spider() file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) try: with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"content") # again, to check s3 objects are overwritten yield storage.store(BytesIO(b"new content")) with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"new content") finally: os.unlink(path) class BlockingFeedStorageTest(unittest.TestCase): def get_test_spider(self, settings=None): class TestSpider(scrapy.Spider): name = 'test_spider' crawler = get_crawler(settings_dict=settings) spider = TestSpider.from_crawler(crawler) return spider def test_default_temp_dir(self): b = BlockingFeedStorage() tmp = b.open(self.get_test_spider()) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tempfile.gettempdir()) def test_temp_file(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) spider = self.get_test_spider({'FEED_TEMPDIR': tests_path}) tmp = b.open(spider) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tests_path) def test_invalid_folder(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) invalid_path = os.path.join(tests_path, 'invalid_path') spider = self.get_test_spider({'FEED_TEMPDIR': invalid_path}) self.assertRaises(OSError, b.open, spider=spider) class S3FeedStorageTest(unittest.TestCase): @mock.patch('scrapy.utils.project.get_project_settings', new=mock.MagicMock(return_value={'AWS_ACCESS_KEY_ID': 'conf_key', 'AWS_SECRET_ACCESS_KEY': 'conf_secret'}), create=True) def test_parse_credentials(self): try: import boto # noqa: F401 except ImportError: raise unittest.SkipTest("S3FeedStorage requires boto") aws_credentials = {'AWS_ACCESS_KEY_ID': 'settings_key', 'AWS_SECRET_ACCESS_KEY': 'settings_secret'} crawler = get_crawler(settings_dict=aws_credentials) # Instantiate with crawler storage = S3FeedStorage.from_crawler(crawler, 's3://mybucket/export.csv') self.assertEqual(storage.access_key, 'settings_key') self.assertEqual(storage.secret_key, 'settings_secret') # Instantiate directly storage = S3FeedStorage('s3://mybucket/export.csv', aws_credentials['AWS_ACCESS_KEY_ID'], aws_credentials['AWS_SECRET_ACCESS_KEY']) self.assertEqual(storage.access_key, 'settings_key') self.assertEqual(storage.secret_key, 'settings_secret') # URI priority > settings priority storage = S3FeedStorage('s3://uri_key:uri_secret@mybucket/export.csv', aws_credentials['AWS_ACCESS_KEY_ID'], aws_credentials['AWS_SECRET_ACCESS_KEY']) self.assertEqual(storage.access_key, 'uri_key') self.assertEqual(storage.secret_key, 'uri_secret') # Backward compatibility for initialising without settings with warnings.catch_warnings(record=True) as w: storage = S3FeedStorage('s3://mybucket/export.csv') self.assertEqual(storage.access_key, 'conf_key') self.assertEqual(storage.secret_key, 'conf_secret') self.assertTrue('without AWS keys' in str(w[-1].message)) @defer.inlineCallbacks def test_store(self): assert_aws_environ() uri = os.environ.get('S3_TEST_FILE_URI') if not uri: raise unittest.SkipTest("No S3 URI available for testing") access_key = os.environ.get('AWS_ACCESS_KEY_ID') secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY') storage = S3FeedStorage(uri, access_key, 
secret_key) verifyObject(IFeedStorage, storage) file = storage.open(scrapy.Spider("default")) expected_content = b"content: \xe2\x98\x83" file.write(expected_content) yield storage.store(file) u = urlparse(uri) content = get_s3_content_and_delete(u.hostname, u.path[1:]) self.assertEqual(content, expected_content) def test_init_without_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) def test_init_with_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') def test_from_crawler_without_acl(self): settings = { 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', } crawler = get_crawler(settings_dict=settings) storage = S3FeedStorage.from_crawler( crawler, 's3://mybucket/export.csv' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) def test_from_crawler_with_acl(self): settings = { 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', 'FEED_STORAGE_S3_ACL': 'custom-acl', } crawler = get_crawler(settings_dict=settings) storage = S3FeedStorage.from_crawler( crawler, 's3://mybucket/export.csv' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') @defer.inlineCallbacks def test_store_botocore_without_acl(self): try: import botocore # noqa: F401 except ImportError: raise unittest.SkipTest('botocore is required') storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) storage.s3_client = mock.MagicMock() yield storage.store(BytesIO(b'test file')) self.assertNotIn('ACL', storage.s3_client.put_object.call_args[1]) @defer.inlineCallbacks def test_store_botocore_with_acl(self): try: import botocore # noqa: F401 except ImportError: raise unittest.SkipTest('botocore is required') storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') storage.s3_client = mock.MagicMock() yield storage.store(BytesIO(b'test file')) self.assertEqual( storage.s3_client.put_object.call_args[1].get('ACL'), 'custom-acl' ) @defer.inlineCallbacks def test_store_not_botocore_without_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) storage.is_botocore = False storage.connect_s3 = mock.MagicMock() self.assertFalse(storage.is_botocore) yield storage.store(BytesIO(b'test file')) conn = storage.connect_s3(*storage.connect_s3.call_args) bucket = conn.get_bucket(*conn.get_bucket.call_args) key = bucket.new_key(*bucket.new_key.call_args) self.assertNotIn( dict(policy='custom-acl'), key.set_contents_from_file.call_args ) @defer.inlineCallbacks def test_store_not_botocore_with_acl(self): storage = S3FeedStorage( 
's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') storage.is_botocore = False storage.connect_s3 = mock.MagicMock() self.assertFalse(storage.is_botocore) yield storage.store(BytesIO(b'test file')) conn = storage.connect_s3(*storage.connect_s3.call_args) bucket = conn.get_bucket(*conn.get_bucket.call_args) key = bucket.new_key(*bucket.new_key.call_args) self.assertIn( dict(policy='custom-acl'), key.set_contents_from_file.call_args ) class StdoutFeedStorageTest(unittest.TestCase): @defer.inlineCallbacks def test_store(self): out = BytesIO() storage = StdoutFeedStorage('stdout:', _stdout=out) file = storage.open(scrapy.Spider("default")) file.write(b"content") yield storage.store(file) self.assertEqual(out.getvalue(), b"content") class FromCrawlerMixin: init_with_crawler = False @classmethod def from_crawler(cls, crawler, *args, **kwargs): cls.init_with_crawler = True return cls(*args, **kwargs) class FromCrawlerCsvItemExporter(CsvItemExporter, FromCrawlerMixin): pass class FromCrawlerFileFeedStorage(FileFeedStorage, FromCrawlerMixin): pass @implementer(IFeedStorage) class LogOnStoreFileStorage: """ This storage logs inside `store` method. It can be used to make sure `store` method is invoked. """ def __init__(self, uri): self.path = file_uri_to_path(uri) self.logger = getLogger() def open(self, spider): return tempfile.NamedTemporaryFile(prefix='feed-') def store(self, file): self.logger.info('Storage.store is called') file.close() class FeedExportTest(unittest.TestCase): class MyItem(scrapy.Item): foo = scrapy.Field() egg = scrapy.Field() baz = scrapy.Field() def setUp(self): self.temp_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.temp_dir, ignore_errors=True) def _random_temp_filename(self): chars = [random.choice(ascii_letters + digits) for _ in range(15)] filename = ''.join(chars) return os.path.join(self.temp_dir, filename) @defer.inlineCallbacks def run_and_export(self, spider_cls, settings): """ Run spider with specified settings; return exported data. """ FEEDS = settings.get('FEEDS') or {} settings['FEEDS'] = { urljoin('file:', pathname2url(str(file_path))): feed for file_path, feed in FEEDS.items() } content = {} try: with MockServer() as s: runner = CrawlerRunner(Settings(settings)) spider_cls.start_urls = [s.url('/')] yield runner.crawl(spider_cls) for file_path, feed in FEEDS.items(): if not os.path.exists(str(file_path)): continue with open(str(file_path), 'rb') as f: content[feed['format']] = f.read() finally: for file_path in FEEDS.keys(): if not os.path.exists(str(file_path)): continue os.remove(str(file_path)) return content @defer.inlineCallbacks def exported_data(self, items, settings): """ Return exported data which a spider yielding ``items`` would return. """ class TestSpider(scrapy.Spider): name = 'testspider' def parse(self, response): for item in items: yield item data = yield self.run_and_export(TestSpider, settings) return data @defer.inlineCallbacks def exported_no_data(self, settings): """ Return exported data which a spider yielding no ``items`` would return. 
""" class TestSpider(scrapy.Spider): name = 'testspider' def parse(self, response): pass data = yield self.run_and_export(TestSpider, settings) return data @defer.inlineCallbacks def assertExportedCsv(self, items, header, rows, settings=None, ordered=True): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'csv'}, }, }) data = yield self.exported_data(items, settings) reader = csv.DictReader(to_unicode(data['csv']).splitlines()) got_rows = list(reader) if ordered: self.assertEqual(reader.fieldnames, header) else: self.assertEqual(set(reader.fieldnames), set(header)) self.assertEqual(rows, got_rows) @defer.inlineCallbacks def assertExportedJsonLines(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'jl'}, }, }) data = yield self.exported_data(items, settings) parsed = [json.loads(to_unicode(line)) for line in data['jl'].splitlines()] rows = [{k: v for k, v in row.items() if v} for row in rows] self.assertEqual(rows, parsed) @defer.inlineCallbacks def assertExportedXml(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'xml'}, }, }) data = yield self.exported_data(items, settings) rows = [{k: v for k, v in row.items() if v} for row in rows] root = lxml.etree.fromstring(data['xml']) got_rows = [{e.tag: e.text for e in it} for it in root.findall('item')] self.assertEqual(rows, got_rows) @defer.inlineCallbacks def assertExportedMultiple(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'xml'}, self._random_temp_filename(): {'format': 'json'}, }, }) data = yield self.exported_data(items, settings) rows = [{k: v for k, v in row.items() if v} for row in rows] # XML root = lxml.etree.fromstring(data['xml']) xml_rows = [{e.tag: e.text for e in it} for it in root.findall('item')] self.assertEqual(rows, xml_rows) # JSON json_rows = json.loads(to_unicode(data['json'])) self.assertEqual(rows, json_rows) def _load_until_eof(self, data, load_func): result = [] with tempfile.TemporaryFile() as temp: temp.write(data) temp.seek(0) while True: try: result.append(load_func(temp)) except EOFError: break return result @defer.inlineCallbacks def assertExportedPickle(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'pickle'}, }, }) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import pickle result = self._load_until_eof(data['pickle'], load_func=pickle.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExportedMarshal(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'marshal'}, }, }) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import marshal result = self._load_until_eof(data['marshal'], load_func=marshal.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExported(self, items, header, rows, settings=None, ordered=True): yield self.assertExportedCsv(items, header, rows, settings, ordered) yield self.assertExportedJsonLines(items, rows, settings) yield self.assertExportedXml(items, rows, settings) yield self.assertExportedPickle(items, rows, settings) yield self.assertExportedMarshal(items, 
rows, settings) yield self.assertExportedMultiple(items, rows, settings) @defer.inlineCallbacks def test_export_items(self): # feed exporters use field names from Item items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), self.MyItem({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] header = self.MyItem.fields.keys() yield self.assertExported(items, header, rows, ordered=False) @defer.inlineCallbacks def test_export_no_items_not_store_empty(self): for fmt in ('json', 'jsonlines', 'xml', 'csv'): settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, } data = yield self.exported_no_data(settings) self.assertEqual(data[fmt], b'') @defer.inlineCallbacks def test_export_no_items_store_empty(self): formats = ( ('json', b'[]'), ('jsonlines', b''), ('xml', b'<?xml version="1.0" encoding="utf-8"?>\n<items></items>'), ('csv', b''), ) for fmt, expctd in formats: settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_STORE_EMPTY': True, 'FEED_EXPORT_INDENT': None, } data = yield self.exported_no_data(settings) self.assertEqual(data[fmt], expctd) @defer.inlineCallbacks def test_export_no_items_multiple_feeds(self): """ Make sure that `storage.store` is called for every feed. """ settings = { 'FEEDS': { self._random_temp_filename(): {'format': 'json'}, self._random_temp_filename(): {'format': 'xml'}, self._random_temp_filename(): {'format': 'csv'}, }, 'FEED_STORAGES': {'file': 'tests.test_feedexport.LogOnStoreFileStorage'}, 'FEED_STORE_EMPTY': False } with LogCapture() as log: yield self.exported_no_data(settings) print(log) self.assertEqual(str(log).count('Storage.store is called'), 3) @defer.inlineCallbacks def test_export_multiple_item_classes(self): class MyItem2(scrapy.Item): foo = scrapy.Field() hello = scrapy.Field() items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), MyItem2({'hello': 'world2', 'foo': 'bar2'}), self.MyItem({'foo': 'bar3', 'egg': 'spam3', 'baz': 'quux3'}), {'hello': 'world4', 'egg': 'spam4'}, ] # by default, Scrapy uses fields of the first Item for CSV and # all fields for JSON Lines header = self.MyItem.fields.keys() rows_csv = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': '', 'foo': 'bar2', 'baz': ''}, {'egg': 'spam3', 'foo': 'bar3', 'baz': 'quux3'}, {'egg': 'spam4', 'foo': '', 'baz': ''}, ] rows_jl = [dict(row) for row in items] yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) # edge case: FEED_EXPORT_FIELDS==[] means the same as default None settings = {'FEED_EXPORT_FIELDS': []} yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl, settings) # it is possible to override fields using FEED_EXPORT_FIELDS header = ["foo", "baz", "hello"] settings = {'FEED_EXPORT_FIELDS': header} rows = [ {'foo': 'bar1', 'baz': '', 'hello': ''}, {'foo': 'bar2', 'baz': '', 'hello': 'world2'}, {'foo': 'bar3', 'baz': 'quux3', 'hello': ''}, {'foo': '', 'baz': '', 'hello': 'world4'}, ] yield self.assertExported(items, header, rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_dicts(self): # When dicts are used, only keys from the first row are used as # a header for CSV, and all fields are used for JSON Lines. 
items = [ {'foo': 'bar', 'egg': 'spam'}, {'foo': 'bar', 'egg': 'spam', 'baz': 'quux'}, ] rows_csv = [ {'egg': 'spam', 'foo': 'bar'}, {'egg': 'spam', 'foo': 'bar'} ] rows_jl = items yield self.assertExportedCsv(items, ['egg', 'foo'], rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) @defer.inlineCallbacks def test_export_feed_export_fields(self): # FEED_EXPORT_FIELDS option allows to order export fields # and to select a subset of fields to export, both for Items and dicts. for item_cls in [self.MyItem, dict]: items = [ item_cls({'foo': 'bar1', 'egg': 'spam1'}), item_cls({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] # export all columns settings = {'FEED_EXPORT_FIELDS': 'foo,baz,egg'} rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] yield self.assertExported(items, ['foo', 'baz', 'egg'], rows, settings=settings, ordered=True) # export a subset of columns settings = {'FEED_EXPORT_FIELDS': 'egg,baz'} rows = [ {'egg': 'spam1', 'baz': ''}, {'egg': 'spam2', 'baz': 'quux2'} ] yield self.assertExported(items, ['egg', 'baz'], rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_encoding(self): items = [dict({'foo': u'Test\xd6'})] formats = { 'json': '[{"foo": "Test\\u00d6"}]'.encode('utf-8'), 'jsonlines': '{"foo": "Test\\u00d6"}\n'.encode('utf-8'), 'xml': ( '<?xml version="1.0" encoding="utf-8"?>\n' '<items><item><foo>Test\xd6</foo></item></items>' ).encode('utf-8'), 'csv': 'foo\r\nTest\xd6\r\n'.encode('utf-8'), } for fmt, expected in formats.items(): settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_EXPORT_INDENT': None, } data = yield self.exported_data(items, settings) self.assertEqual(expected, data[fmt]) formats = { 'json': '[{"foo": "Test\xd6"}]'.encode('latin-1'), 'jsonlines': '{"foo": "Test\xd6"}\n'.encode('latin-1'), 'xml': ( '<?xml version="1.0" encoding="latin-1"?>\n' '<items><item><foo>Test\xd6</foo></item></items>' ).encode('latin-1'), 'csv': 'foo\r\nTest\xd6\r\n'.encode('latin-1'), } for fmt, expected in formats.items(): settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_EXPORT_INDENT': None, 'FEED_EXPORT_ENCODING': 'latin-1', } data = yield self.exported_data(items, settings) self.assertEqual(expected, data[fmt]) @defer.inlineCallbacks def test_export_multiple_configs(self): items = [dict({'foo': u'FOO', 'bar': u'BAR'})] formats = { 'json': '[\n{"bar": "BAR"}\n]'.encode('utf-8'), 'xml': ( '<?xml version="1.0" encoding="latin-1"?>\n' '<items>\n <item>\n <foo>FOO</foo>\n </item>\n</items>' ).encode('latin-1'), 'csv': 'bar,foo\r\nBAR,FOO\r\n'.encode('utf-8'), } settings = { 'FEEDS': { self._random_temp_filename(): { 'format': 'json', 'indent': 0, 'fields': ['bar'], 'encoding': 'utf-8', }, self._random_temp_filename(): { 'format': 'xml', 'indent': 2, 'fields': ['foo'], 'encoding': 'latin-1', }, self._random_temp_filename(): { 'format': 'csv', 'indent': None, 'fields': ['bar', 'foo'], 'encoding': 'utf-8', }, }, } data = yield self.exported_data(items, settings) for fmt, expected in formats.items(): self.assertEqual(expected, data[fmt]) @defer.inlineCallbacks def test_export_indentation(self): items = [ {'foo': ['bar']}, {'key': 'value'}, ] test_cases = [ # JSON { 'format': 'json', 'indent': None, 'expected': b'[{"foo": ["bar"]},{"key": "value"}]', }, { 'format': 'json', 'indent': -1, 'expected': b"""[ {"foo": ["bar"]}, {"key": "value"} ]""", }, { 'format': 'json', 'indent': 0, 'expected': b"""[ {"foo": ["bar"]}, {"key": 
"value"} ]""", }, { 'format': 'json', 'indent': 2, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, { 'format': 'json', 'indent': 4, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, { 'format': 'json', 'indent': 5, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, # XML { 'format': 'xml', 'indent': None, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items><item><foo><value>bar</value></foo></item><item><key>value</key></item></items>""", }, { 'format': 'xml', 'indent': -1, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item><foo><value>bar</value></foo></item> <item><key>value</key></item> </items>""", }, { 'format': 'xml', 'indent': 0, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item><foo><value>bar</value></foo></item> <item><key>value</key></item> </items>""", }, { 'format': 'xml', 'indent': 2, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, { 'format': 'xml', 'indent': 4, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, { 'format': 'xml', 'indent': 5, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, ] for row in test_cases: settings = { 'FEEDS': { self._random_temp_filename(): { 'format': row['format'], 'indent': row['indent'], }, }, } data = yield self.exported_data(items, settings) self.assertEqual(row['expected'], data[row['format']]) @defer.inlineCallbacks def test_init_exporters_storages_with_crawler(self): settings = { 'FEED_EXPORTERS': {'csv': 'tests.test_feedexport.FromCrawlerCsvItemExporter'}, 'FEED_STORAGES': {'file': 'tests.test_feedexport.FromCrawlerFileFeedStorage'}, 'FEEDS': { self._random_temp_filename(): {'format': 'csv'}, }, } yield self.exported_data(items=[], settings=settings) self.assertTrue(FromCrawlerCsvItemExporter.init_with_crawler) self.assertTrue(FromCrawlerFileFeedStorage.init_with_crawler) @defer.inlineCallbacks def test_pathlib_uri(self): feed_path = Path(self._random_temp_filename()) settings = { 'FEED_STORE_EMPTY': True, 'FEEDS': { feed_path: {'format': 'csv'} }, } data = yield self.exported_no_data(settings) self.assertEqual(data['csv'], b'')
33.88716
95
0.556292
import csv import json import os import random import shutil import string import tempfile import warnings from io import BytesIO from logging import getLogger from pathlib import Path from string import ascii_letters, digits from unittest import mock from urllib.parse import urljoin, urlparse, quote from urllib.request import pathname2url import lxml.etree from testfixtures import LogCapture from twisted.internet import defer from twisted.trial import unittest from w3lib.url import file_uri_to_path, path_to_file_uri from zope.interface import implementer from zope.interface.verify import verifyObject import scrapy from scrapy.crawler import CrawlerRunner from scrapy.exporters import CsvItemExporter from scrapy.extensions.feedexport import (BlockingFeedStorage, FileFeedStorage, FTPFeedStorage, IFeedStorage, S3FeedStorage, StdoutFeedStorage) from scrapy.settings import Settings from scrapy.utils.python import to_unicode from scrapy.utils.test import assert_aws_environ, get_crawler, get_s3_content_and_delete from tests.mockserver import MockServer class FileFeedStorageTest(unittest.TestCase): def test_store_file_uri(self): path = os.path.abspath(self.mktemp()) uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_file_uri_makedirs(self): path = os.path.abspath(self.mktemp()) path = os.path.join(path, 'more', 'paths', 'file.txt') uri = path_to_file_uri(path) return self._assert_stores(FileFeedStorage(uri), path) def test_store_direct_path(self): path = os.path.abspath(self.mktemp()) return self._assert_stores(FileFeedStorage(path), path) def test_store_direct_path_relative(self): path = self.mktemp() return self._assert_stores(FileFeedStorage(path), path) def test_interface(self): path = self.mktemp() st = FileFeedStorage(path) verifyObject(IFeedStorage, st) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = scrapy.Spider("default") file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) try: with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"content") finally: os.unlink(path) class FTPFeedStorageTest(unittest.TestCase): def get_test_spider(self, settings=None): class TestSpider(scrapy.Spider): name = 'test_spider' crawler = get_crawler(settings_dict=settings) spider = TestSpider.from_crawler(crawler) return spider def test_store(self): uri = os.environ.get('FEEDTEST_FTP_URI') path = os.environ.get('FEEDTEST_FTP_PATH') if not (uri and path): raise unittest.SkipTest("No FTP server available for testing") st = FTPFeedStorage(uri) verifyObject(IFeedStorage, st) return self._assert_stores(st, path) def test_store_active_mode(self): uri = os.environ.get('FEEDTEST_FTP_URI') path = os.environ.get('FEEDTEST_FTP_PATH') if not (uri and path): raise unittest.SkipTest("No FTP server available for testing") use_active_mode = {'FEED_STORAGE_FTP_ACTIVE': True} crawler = get_crawler(settings_dict=use_active_mode) st = FTPFeedStorage.from_crawler(crawler, uri) verifyObject(IFeedStorage, st) return self._assert_stores(st, path) def test_uri_auth_quote(self): pw_quoted = quote(string.punctuation, safe='') st = FTPFeedStorage('ftp://foo:%[email protected]/some_path' % pw_quoted) self.assertEqual(st.password, string.punctuation) @defer.inlineCallbacks def _assert_stores(self, storage, path): spider = self.get_test_spider() file = storage.open(spider) file.write(b"content") yield storage.store(file) self.assertTrue(os.path.exists(path)) try: with open(path, 'rb') as fp: 
self.assertEqual(fp.read(), b"content") yield storage.store(BytesIO(b"new content")) with open(path, 'rb') as fp: self.assertEqual(fp.read(), b"new content") finally: os.unlink(path) class BlockingFeedStorageTest(unittest.TestCase): def get_test_spider(self, settings=None): class TestSpider(scrapy.Spider): name = 'test_spider' crawler = get_crawler(settings_dict=settings) spider = TestSpider.from_crawler(crawler) return spider def test_default_temp_dir(self): b = BlockingFeedStorage() tmp = b.open(self.get_test_spider()) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tempfile.gettempdir()) def test_temp_file(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) spider = self.get_test_spider({'FEED_TEMPDIR': tests_path}) tmp = b.open(spider) tmp_path = os.path.dirname(tmp.name) self.assertEqual(tmp_path, tests_path) def test_invalid_folder(self): b = BlockingFeedStorage() tests_path = os.path.dirname(os.path.abspath(__file__)) invalid_path = os.path.join(tests_path, 'invalid_path') spider = self.get_test_spider({'FEED_TEMPDIR': invalid_path}) self.assertRaises(OSError, b.open, spider=spider) class S3FeedStorageTest(unittest.TestCase): @mock.patch('scrapy.utils.project.get_project_settings', new=mock.MagicMock(return_value={'AWS_ACCESS_KEY_ID': 'conf_key', 'AWS_SECRET_ACCESS_KEY': 'conf_secret'}), create=True) def test_parse_credentials(self): try: import boto except ImportError: raise unittest.SkipTest("S3FeedStorage requires boto") aws_credentials = {'AWS_ACCESS_KEY_ID': 'settings_key', 'AWS_SECRET_ACCESS_KEY': 'settings_secret'} crawler = get_crawler(settings_dict=aws_credentials) storage = S3FeedStorage.from_crawler(crawler, 's3://mybucket/export.csv') self.assertEqual(storage.access_key, 'settings_key') self.assertEqual(storage.secret_key, 'settings_secret') storage = S3FeedStorage('s3://mybucket/export.csv', aws_credentials['AWS_ACCESS_KEY_ID'], aws_credentials['AWS_SECRET_ACCESS_KEY']) self.assertEqual(storage.access_key, 'settings_key') self.assertEqual(storage.secret_key, 'settings_secret') storage = S3FeedStorage('s3://uri_key:uri_secret@mybucket/export.csv', aws_credentials['AWS_ACCESS_KEY_ID'], aws_credentials['AWS_SECRET_ACCESS_KEY']) self.assertEqual(storage.access_key, 'uri_key') self.assertEqual(storage.secret_key, 'uri_secret') with warnings.catch_warnings(record=True) as w: storage = S3FeedStorage('s3://mybucket/export.csv') self.assertEqual(storage.access_key, 'conf_key') self.assertEqual(storage.secret_key, 'conf_secret') self.assertTrue('without AWS keys' in str(w[-1].message)) @defer.inlineCallbacks def test_store(self): assert_aws_environ() uri = os.environ.get('S3_TEST_FILE_URI') if not uri: raise unittest.SkipTest("No S3 URI available for testing") access_key = os.environ.get('AWS_ACCESS_KEY_ID') secret_key = os.environ.get('AWS_SECRET_ACCESS_KEY') storage = S3FeedStorage(uri, access_key, secret_key) verifyObject(IFeedStorage, storage) file = storage.open(scrapy.Spider("default")) expected_content = b"content: \xe2\x98\x83" file.write(expected_content) yield storage.store(file) u = urlparse(uri) content = get_s3_content_and_delete(u.hostname, u.path[1:]) self.assertEqual(content, expected_content) def test_init_without_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) def test_init_with_acl(self): storage = S3FeedStorage( 
's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') def test_from_crawler_without_acl(self): settings = { 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', } crawler = get_crawler(settings_dict=settings) storage = S3FeedStorage.from_crawler( crawler, 's3://mybucket/export.csv' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) def test_from_crawler_with_acl(self): settings = { 'AWS_ACCESS_KEY_ID': 'access_key', 'AWS_SECRET_ACCESS_KEY': 'secret_key', 'FEED_STORAGE_S3_ACL': 'custom-acl', } crawler = get_crawler(settings_dict=settings) storage = S3FeedStorage.from_crawler( crawler, 's3://mybucket/export.csv' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') @defer.inlineCallbacks def test_store_botocore_without_acl(self): try: import botocore except ImportError: raise unittest.SkipTest('botocore is required') storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) storage.s3_client = mock.MagicMock() yield storage.store(BytesIO(b'test file')) self.assertNotIn('ACL', storage.s3_client.put_object.call_args[1]) @defer.inlineCallbacks def test_store_botocore_with_acl(self): try: import botocore except ImportError: raise unittest.SkipTest('botocore is required') storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') storage.s3_client = mock.MagicMock() yield storage.store(BytesIO(b'test file')) self.assertEqual( storage.s3_client.put_object.call_args[1].get('ACL'), 'custom-acl' ) @defer.inlineCallbacks def test_store_not_botocore_without_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, None) storage.is_botocore = False storage.connect_s3 = mock.MagicMock() self.assertFalse(storage.is_botocore) yield storage.store(BytesIO(b'test file')) conn = storage.connect_s3(*storage.connect_s3.call_args) bucket = conn.get_bucket(*conn.get_bucket.call_args) key = bucket.new_key(*bucket.new_key.call_args) self.assertNotIn( dict(policy='custom-acl'), key.set_contents_from_file.call_args ) @defer.inlineCallbacks def test_store_not_botocore_with_acl(self): storage = S3FeedStorage( 's3://mybucket/export.csv', 'access_key', 'secret_key', 'custom-acl' ) self.assertEqual(storage.access_key, 'access_key') self.assertEqual(storage.secret_key, 'secret_key') self.assertEqual(storage.acl, 'custom-acl') storage.is_botocore = False storage.connect_s3 = mock.MagicMock() self.assertFalse(storage.is_botocore) yield storage.store(BytesIO(b'test file')) conn = storage.connect_s3(*storage.connect_s3.call_args) bucket = conn.get_bucket(*conn.get_bucket.call_args) key = bucket.new_key(*bucket.new_key.call_args) self.assertIn( dict(policy='custom-acl'), key.set_contents_from_file.call_args ) class StdoutFeedStorageTest(unittest.TestCase): 
@defer.inlineCallbacks def test_store(self): out = BytesIO() storage = StdoutFeedStorage('stdout:', _stdout=out) file = storage.open(scrapy.Spider("default")) file.write(b"content") yield storage.store(file) self.assertEqual(out.getvalue(), b"content") class FromCrawlerMixin: init_with_crawler = False @classmethod def from_crawler(cls, crawler, *args, **kwargs): cls.init_with_crawler = True return cls(*args, **kwargs) class FromCrawlerCsvItemExporter(CsvItemExporter, FromCrawlerMixin): pass class FromCrawlerFileFeedStorage(FileFeedStorage, FromCrawlerMixin): pass @implementer(IFeedStorage) class LogOnStoreFileStorage: def __init__(self, uri): self.path = file_uri_to_path(uri) self.logger = getLogger() def open(self, spider): return tempfile.NamedTemporaryFile(prefix='feed-') def store(self, file): self.logger.info('Storage.store is called') file.close() class FeedExportTest(unittest.TestCase): class MyItem(scrapy.Item): foo = scrapy.Field() egg = scrapy.Field() baz = scrapy.Field() def setUp(self): self.temp_dir = tempfile.mkdtemp() def tearDown(self): shutil.rmtree(self.temp_dir, ignore_errors=True) def _random_temp_filename(self): chars = [random.choice(ascii_letters + digits) for _ in range(15)] filename = ''.join(chars) return os.path.join(self.temp_dir, filename) @defer.inlineCallbacks def run_and_export(self, spider_cls, settings): FEEDS = settings.get('FEEDS') or {} settings['FEEDS'] = { urljoin('file:', pathname2url(str(file_path))): feed for file_path, feed in FEEDS.items() } content = {} try: with MockServer() as s: runner = CrawlerRunner(Settings(settings)) spider_cls.start_urls = [s.url('/')] yield runner.crawl(spider_cls) for file_path, feed in FEEDS.items(): if not os.path.exists(str(file_path)): continue with open(str(file_path), 'rb') as f: content[feed['format']] = f.read() finally: for file_path in FEEDS.keys(): if not os.path.exists(str(file_path)): continue os.remove(str(file_path)) return content @defer.inlineCallbacks def exported_data(self, items, settings): class TestSpider(scrapy.Spider): name = 'testspider' def parse(self, response): for item in items: yield item data = yield self.run_and_export(TestSpider, settings) return data @defer.inlineCallbacks def exported_no_data(self, settings): class TestSpider(scrapy.Spider): name = 'testspider' def parse(self, response): pass data = yield self.run_and_export(TestSpider, settings) return data @defer.inlineCallbacks def assertExportedCsv(self, items, header, rows, settings=None, ordered=True): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'csv'}, }, }) data = yield self.exported_data(items, settings) reader = csv.DictReader(to_unicode(data['csv']).splitlines()) got_rows = list(reader) if ordered: self.assertEqual(reader.fieldnames, header) else: self.assertEqual(set(reader.fieldnames), set(header)) self.assertEqual(rows, got_rows) @defer.inlineCallbacks def assertExportedJsonLines(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'jl'}, }, }) data = yield self.exported_data(items, settings) parsed = [json.loads(to_unicode(line)) for line in data['jl'].splitlines()] rows = [{k: v for k, v in row.items() if v} for row in rows] self.assertEqual(rows, parsed) @defer.inlineCallbacks def assertExportedXml(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'xml'}, }, }) data = yield self.exported_data(items, settings) 
rows = [{k: v for k, v in row.items() if v} for row in rows] root = lxml.etree.fromstring(data['xml']) got_rows = [{e.tag: e.text for e in it} for it in root.findall('item')] self.assertEqual(rows, got_rows) @defer.inlineCallbacks def assertExportedMultiple(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'xml'}, self._random_temp_filename(): {'format': 'json'}, }, }) data = yield self.exported_data(items, settings) rows = [{k: v for k, v in row.items() if v} for row in rows] root = lxml.etree.fromstring(data['xml']) xml_rows = [{e.tag: e.text for e in it} for it in root.findall('item')] self.assertEqual(rows, xml_rows) json_rows = json.loads(to_unicode(data['json'])) self.assertEqual(rows, json_rows) def _load_until_eof(self, data, load_func): result = [] with tempfile.TemporaryFile() as temp: temp.write(data) temp.seek(0) while True: try: result.append(load_func(temp)) except EOFError: break return result @defer.inlineCallbacks def assertExportedPickle(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'pickle'}, }, }) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import pickle result = self._load_until_eof(data['pickle'], load_func=pickle.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExportedMarshal(self, items, rows, settings=None): settings = settings or {} settings.update({ 'FEEDS': { self._random_temp_filename(): {'format': 'marshal'}, }, }) data = yield self.exported_data(items, settings) expected = [{k: v for k, v in row.items() if v} for row in rows] import marshal result = self._load_until_eof(data['marshal'], load_func=marshal.load) self.assertEqual(expected, result) @defer.inlineCallbacks def assertExported(self, items, header, rows, settings=None, ordered=True): yield self.assertExportedCsv(items, header, rows, settings, ordered) yield self.assertExportedJsonLines(items, rows, settings) yield self.assertExportedXml(items, rows, settings) yield self.assertExportedPickle(items, rows, settings) yield self.assertExportedMarshal(items, rows, settings) yield self.assertExportedMultiple(items, rows, settings) @defer.inlineCallbacks def test_export_items(self): items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), self.MyItem({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] header = self.MyItem.fields.keys() yield self.assertExported(items, header, rows, ordered=False) @defer.inlineCallbacks def test_export_no_items_not_store_empty(self): for fmt in ('json', 'jsonlines', 'xml', 'csv'): settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, } data = yield self.exported_no_data(settings) self.assertEqual(data[fmt], b'') @defer.inlineCallbacks def test_export_no_items_store_empty(self): formats = ( ('json', b'[]'), ('jsonlines', b''), ('xml', b'<?xml version="1.0" encoding="utf-8"?>\n<items></items>'), ('csv', b''), ) for fmt, expctd in formats: settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_STORE_EMPTY': True, 'FEED_EXPORT_INDENT': None, } data = yield self.exported_no_data(settings) self.assertEqual(data[fmt], expctd) @defer.inlineCallbacks def test_export_no_items_multiple_feeds(self): settings = { 'FEEDS': { self._random_temp_filename(): {'format': 'json'}, 
self._random_temp_filename(): {'format': 'xml'}, self._random_temp_filename(): {'format': 'csv'}, }, 'FEED_STORAGES': {'file': 'tests.test_feedexport.LogOnStoreFileStorage'}, 'FEED_STORE_EMPTY': False } with LogCapture() as log: yield self.exported_no_data(settings) print(log) self.assertEqual(str(log).count('Storage.store is called'), 3) @defer.inlineCallbacks def test_export_multiple_item_classes(self): class MyItem2(scrapy.Item): foo = scrapy.Field() hello = scrapy.Field() items = [ self.MyItem({'foo': 'bar1', 'egg': 'spam1'}), MyItem2({'hello': 'world2', 'foo': 'bar2'}), self.MyItem({'foo': 'bar3', 'egg': 'spam3', 'baz': 'quux3'}), {'hello': 'world4', 'egg': 'spam4'}, ] header = self.MyItem.fields.keys() rows_csv = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': '', 'foo': 'bar2', 'baz': ''}, {'egg': 'spam3', 'foo': 'bar3', 'baz': 'quux3'}, {'egg': 'spam4', 'foo': '', 'baz': ''}, ] rows_jl = [dict(row) for row in items] yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) settings = {'FEED_EXPORT_FIELDS': []} yield self.assertExportedCsv(items, header, rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl, settings) header = ["foo", "baz", "hello"] settings = {'FEED_EXPORT_FIELDS': header} rows = [ {'foo': 'bar1', 'baz': '', 'hello': ''}, {'foo': 'bar2', 'baz': '', 'hello': 'world2'}, {'foo': 'bar3', 'baz': 'quux3', 'hello': ''}, {'foo': '', 'baz': '', 'hello': 'world4'}, ] yield self.assertExported(items, header, rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_dicts(self): items = [ {'foo': 'bar', 'egg': 'spam'}, {'foo': 'bar', 'egg': 'spam', 'baz': 'quux'}, ] rows_csv = [ {'egg': 'spam', 'foo': 'bar'}, {'egg': 'spam', 'foo': 'bar'} ] rows_jl = items yield self.assertExportedCsv(items, ['egg', 'foo'], rows_csv, ordered=False) yield self.assertExportedJsonLines(items, rows_jl) @defer.inlineCallbacks def test_export_feed_export_fields(self): for item_cls in [self.MyItem, dict]: items = [ item_cls({'foo': 'bar1', 'egg': 'spam1'}), item_cls({'foo': 'bar2', 'egg': 'spam2', 'baz': 'quux2'}), ] settings = {'FEED_EXPORT_FIELDS': 'foo,baz,egg'} rows = [ {'egg': 'spam1', 'foo': 'bar1', 'baz': ''}, {'egg': 'spam2', 'foo': 'bar2', 'baz': 'quux2'} ] yield self.assertExported(items, ['foo', 'baz', 'egg'], rows, settings=settings, ordered=True) settings = {'FEED_EXPORT_FIELDS': 'egg,baz'} rows = [ {'egg': 'spam1', 'baz': ''}, {'egg': 'spam2', 'baz': 'quux2'} ] yield self.assertExported(items, ['egg', 'baz'], rows, settings=settings, ordered=True) @defer.inlineCallbacks def test_export_encoding(self): items = [dict({'foo': u'Test\xd6'})] formats = { 'json': '[{"foo": "Test\\u00d6"}]'.encode('utf-8'), 'jsonlines': '{"foo": "Test\\u00d6"}\n'.encode('utf-8'), 'xml': ( '<?xml version="1.0" encoding="utf-8"?>\n' '<items><item><foo>Test\xd6</foo></item></items>' ).encode('utf-8'), 'csv': 'foo\r\nTest\xd6\r\n'.encode('utf-8'), } for fmt, expected in formats.items(): settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_EXPORT_INDENT': None, } data = yield self.exported_data(items, settings) self.assertEqual(expected, data[fmt]) formats = { 'json': '[{"foo": "Test\xd6"}]'.encode('latin-1'), 'jsonlines': '{"foo": "Test\xd6"}\n'.encode('latin-1'), 'xml': ( '<?xml version="1.0" encoding="latin-1"?>\n' '<items><item><foo>Test\xd6</foo></item></items>' ).encode('latin-1'), 'csv': 'foo\r\nTest\xd6\r\n'.encode('latin-1'), } for fmt, expected in formats.items(): 
settings = { 'FEEDS': { self._random_temp_filename(): {'format': fmt}, }, 'FEED_EXPORT_INDENT': None, 'FEED_EXPORT_ENCODING': 'latin-1', } data = yield self.exported_data(items, settings) self.assertEqual(expected, data[fmt]) @defer.inlineCallbacks def test_export_multiple_configs(self): items = [dict({'foo': u'FOO', 'bar': u'BAR'})] formats = { 'json': '[\n{"bar": "BAR"}\n]'.encode('utf-8'), 'xml': ( '<?xml version="1.0" encoding="latin-1"?>\n' '<items>\n <item>\n <foo>FOO</foo>\n </item>\n</items>' ).encode('latin-1'), 'csv': 'bar,foo\r\nBAR,FOO\r\n'.encode('utf-8'), } settings = { 'FEEDS': { self._random_temp_filename(): { 'format': 'json', 'indent': 0, 'fields': ['bar'], 'encoding': 'utf-8', }, self._random_temp_filename(): { 'format': 'xml', 'indent': 2, 'fields': ['foo'], 'encoding': 'latin-1', }, self._random_temp_filename(): { 'format': 'csv', 'indent': None, 'fields': ['bar', 'foo'], 'encoding': 'utf-8', }, }, } data = yield self.exported_data(items, settings) for fmt, expected in formats.items(): self.assertEqual(expected, data[fmt]) @defer.inlineCallbacks def test_export_indentation(self): items = [ {'foo': ['bar']}, {'key': 'value'}, ] test_cases = [ { 'format': 'json', 'indent': None, 'expected': b'[{"foo": ["bar"]},{"key": "value"}]', }, { 'format': 'json', 'indent': -1, 'expected': b"""[ {"foo": ["bar"]}, {"key": "value"} ]""", }, { 'format': 'json', 'indent': 0, 'expected': b"""[ {"foo": ["bar"]}, {"key": "value"} ]""", }, { 'format': 'json', 'indent': 2, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, { 'format': 'json', 'indent': 4, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, { 'format': 'json', 'indent': 5, 'expected': b"""[ { "foo": [ "bar" ] }, { "key": "value" } ]""", }, { 'format': 'xml', 'indent': None, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items><item><foo><value>bar</value></foo></item><item><key>value</key></item></items>""", }, { 'format': 'xml', 'indent': -1, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item><foo><value>bar</value></foo></item> <item><key>value</key></item> </items>""", }, { 'format': 'xml', 'indent': 0, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item><foo><value>bar</value></foo></item> <item><key>value</key></item> </items>""", }, { 'format': 'xml', 'indent': 2, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, { 'format': 'xml', 'indent': 4, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, { 'format': 'xml', 'indent': 5, 'expected': b"""<?xml version="1.0" encoding="utf-8"?> <items> <item> <foo> <value>bar</value> </foo> </item> <item> <key>value</key> </item> </items>""", }, ] for row in test_cases: settings = { 'FEEDS': { self._random_temp_filename(): { 'format': row['format'], 'indent': row['indent'], }, }, } data = yield self.exported_data(items, settings) self.assertEqual(row['expected'], data[row['format']]) @defer.inlineCallbacks def test_init_exporters_storages_with_crawler(self): settings = { 'FEED_EXPORTERS': {'csv': 'tests.test_feedexport.FromCrawlerCsvItemExporter'}, 'FEED_STORAGES': {'file': 'tests.test_feedexport.FromCrawlerFileFeedStorage'}, 'FEEDS': { self._random_temp_filename(): {'format': 'csv'}, }, } yield self.exported_data(items=[], settings=settings) 
self.assertTrue(FromCrawlerCsvItemExporter.init_with_crawler) self.assertTrue(FromCrawlerFileFeedStorage.init_with_crawler) @defer.inlineCallbacks def test_pathlib_uri(self): feed_path = Path(self._random_temp_filename()) settings = { 'FEED_STORE_EMPTY': True, 'FEEDS': { feed_path: {'format': 'csv'} }, } data = yield self.exported_no_data(settings) self.assertEqual(data['csv'], b'')
true
true
f7013d0c15568153ebe3a74156ce34b8ce74602d
224
py
Python
extraesia/extraesia/doctype/extraesia_settings/test_extraesia_settings.py
jamesriady/extraesia
036a8fa09467b4119a13875cfa79d4a56f7ee78b
[ "MIT" ]
null
null
null
extraesia/extraesia/doctype/extraesia_settings/test_extraesia_settings.py
jamesriady/extraesia
036a8fa09467b4119a13875cfa79d4a56f7ee78b
[ "MIT" ]
null
null
null
extraesia/extraesia/doctype/extraesia_settings/test_extraesia_settings.py
jamesriady/extraesia
036a8fa09467b4119a13875cfa79d4a56f7ee78b
[ "MIT" ]
1
2020-10-16T08:45:48.000Z
2020-10-16T08:45:48.000Z
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Youssef Restom and Contributors
# See license.txt
from __future__ import unicode_literals

# import frappe
import unittest


class TestExtraesiaSettings(unittest.TestCase):
	pass
20.363636
53
0.772321
from __future__ import unicode_literals

import unittest


class TestExtraesiaSettings(unittest.TestCase):
	pass
true
true
f7013db89cca1bbd790d811b072435b06291b762
1,053
py
Python
setup.py
goerz/better-apidoc
99bec799618cc5cf4ea6d062a0027094c4e06632
[ "BSD-2-Clause" ]
25
2017-03-28T09:48:15.000Z
2021-10-15T08:58:30.000Z
setup.py
goerz/better-apidoc
99bec799618cc5cf4ea6d062a0027094c4e06632
[ "BSD-2-Clause" ]
15
2017-03-28T14:03:56.000Z
2021-05-10T20:29:48.000Z
setup.py
goerz/better-apidoc
99bec799618cc5cf4ea6d062a0027094c4e06632
[ "BSD-2-Clause" ]
6
2017-03-28T09:48:18.000Z
2021-05-09T21:27:49.000Z
#!/usr/bin/env python
import setuptools


def get_version(filename):
    with open(filename) as in_fh:
        for line in in_fh:
            if line.startswith('__version__'):
                return line.split('=')[1].strip()[1:-1]
    raise ValueError("Cannot extract version from %s" % filename)


setuptools.setup(
    name="better-apidoc",
    version=get_version("better_apidoc.py"),
    url="https://github.com/goerz/better-apidoc",
    author="Michael Goerz",
    author_email="[email protected]",
    description="A version of sphinx-apidoc with support for templating",
    install_requires=[
        'sphinx',
        'jinja2'
    ],
    extras_require={'dev': ['pytest', ]},
    py_modules=['better_apidoc'],
    entry_points='''
        [console_scripts]
        better-apidoc=better_apidoc:main
    ''',
    classifiers=[
        'Environment :: Console',
        'Natural Language :: English',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
    ],
)
28.459459
73
0.619183
import setuptools


def get_version(filename):
    with open(filename) as in_fh:
        for line in in_fh:
            if line.startswith('__version__'):
                return line.split('=')[1].strip()[1:-1]
    raise ValueError("Cannot extract version from %s" % filename)


setuptools.setup(
    name="better-apidoc",
    version=get_version("better_apidoc.py"),
    url="https://github.com/goerz/better-apidoc",
    author="Michael Goerz",
    author_email="[email protected]",
    description="A version of sphinx-apidoc with support for templating",
    install_requires=[
        'sphinx',
        'jinja2'
    ],
    extras_require={'dev': ['pytest', ]},
    py_modules=['better_apidoc'],
    entry_points='''
        [console_scripts]
        better-apidoc=better_apidoc:main
    ''',
    classifiers=[
        'Environment :: Console',
        'Natural Language :: English',
        'License :: OSI Approved :: BSD License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
    ],
)
true
true
f7013e240cd599786e4b94fb704df0ca4ddfeb5b
388
py
Python
StackApp/env/lib/python2.7/site-packages/pylint/test/functional/raising_non_exception_py3.py
jonathanmusila/StackOverflow-Lite
a9a03f129592c6f741eb4d1e608ca2db0e40bf11
[ "MIT" ]
35
2016-09-22T22:53:14.000Z
2020-02-13T15:12:21.000Z
virtual/lib/python3.6/site-packages/pylint/test/functional/raising_non_exception_py3.py
evantoh/patient-management-system
6637eb1344775633759165260ed99843581c0e72
[ "Unlicense" ]
32
2018-05-01T05:24:43.000Z
2022-03-11T23:20:39.000Z
virtual/lib/python3.6/site-packages/pylint/test/functional/raising_non_exception_py3.py
evantoh/patient-management-system
6637eb1344775633759165260ed99843581c0e72
[ "Unlicense" ]
88
2016-11-27T02:16:11.000Z
2020-02-28T05:10:26.000Z
"""The following code should emit a raising-non-exception. Previously, it didn't, due to a bug in the check for bad-exception-context, which prevented further checking on the Raise node. """ # pylint: disable=import-error, too-few-public-methods from missing_module import missing class Exc(object): """Not an actual exception.""" raise Exc from missing # [raising-non-exception]
27.714286
75
0.755155
from missing_module import missing


class Exc(object):

raise Exc from missing
true
true
f7013e3219432a2fa9878be3fd59a8559b4bbcd8
709
py
Python
django_daraja/urls.py
martinmogusu/django-daraja
8a4788d3da9d7b0794a472b5320897480ec73ee6
[ "MIT" ]
14
2019-02-25T07:31:28.000Z
2022-02-09T13:48:11.000Z
django_daraja/urls.py
martinmogusu/django-daraja
8a4788d3da9d7b0794a472b5320897480ec73ee6
[ "MIT" ]
2
2019-12-14T20:20:52.000Z
2022-02-18T08:08:31.000Z
django_daraja/urls.py
martinmogusu/django-daraja
8a4788d3da9d7b0794a472b5320897480ec73ee6
[ "MIT" ]
11
2019-09-15T18:53:28.000Z
2022-03-10T06:12:04.000Z
from django.conf.urls import url, include

from . import views

test_patterns = [
    url(r'^$', views.index, name='django_daraja_index'),
    url(r'^oauth/success', views.oauth_success, name='test_oauth_success'),
    url(r'^stk-push/success', views.stk_push_success, name='test_stk_push_success'),
    url(r'^business-payment/success', views.business_payment_success, name='test_business_payment_success'),
    url(r'^salary-payment/success', views.salary_payment_success, name='test_salary_payment_success'),
    url(r'^promotion-payment/success', views.promotion_payment_success,
        name='test_promotion_payment_success'),
]

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^tests/', include(test_patterns)),
]
39.388889
108
0.765867
from django.conf.urls import url, include

from . import views

test_patterns = [
    url(r'^$', views.index, name='django_daraja_index'),
    url(r'^oauth/success', views.oauth_success, name='test_oauth_success'),
    url(r'^stk-push/success', views.stk_push_success, name='test_stk_push_success'),
    url(r'^business-payment/success', views.business_payment_success, name='test_business_payment_success'),
    url(r'^salary-payment/success', views.salary_payment_success, name='test_salary_payment_success'),
    url(r'^promotion-payment/success', views.promotion_payment_success,
        name='test_promotion_payment_success'),
]

urlpatterns = [
    url(r'^$', views.index, name='index'),
    url(r'^tests/', include(test_patterns)),
]
true
true
f7013ee950543f8a9cca0d5c76ee787cb943881b
52,341
py
Python
tests/test_api_build.py
eklitzke/conda-build
5229bf972511e506548b77113ad5deb9856c2130
[ "BSD-3-Clause" ]
null
null
null
tests/test_api_build.py
eklitzke/conda-build
5229bf972511e506548b77113ad5deb9856c2130
[ "BSD-3-Clause" ]
null
null
null
tests/test_api_build.py
eklitzke/conda-build
5229bf972511e506548b77113ad5deb9856c2130
[ "BSD-3-Clause" ]
null
null
null
""" This module tests the build API. These are high-level integration tests. """ import base64 from collections import OrderedDict from glob import glob import logging import os import re import subprocess import sys import json import uuid # for version import conda from conda_build.conda_interface import PY3, url_path, LinkError, CondaError, cc_conda_build import conda_build from binstar_client.commands import remove, show from binstar_client.errors import NotFound from pkg_resources import parse_version import pytest import yaml import tarfile from conda_build import api, exceptions, __version__ from conda_build.build import VersionOrder from conda_build.render import finalize_metadata from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, package_has_file, check_output_env, get_conda_operation_locks) from conda_build.os_utils.external import find_executable from conda_build.exceptions import DependencyNeedsBuildingError from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, FileNotFoundError # define a few commonly used recipes - use os.path.join(metadata_dir, recipe) elsewhere empty_sections = os.path.join(metadata_dir, "empty_sections") def represent_ordereddict(dumper, data): value = [] for item_key, item_value in data.items(): node_key = dumper.represent_data(item_key) node_value = dumper.represent_data(item_value) value.append((node_key, node_value)) return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value) yaml.add_representer(OrderedDict, represent_ordereddict) class AnacondaClientArgs(object): def __init__(self, specs, token=None, site=None, log_level=logging.INFO, force=False): from binstar_client.utils import parse_specs self.specs = [parse_specs(specs)] self.spec = self.specs[0] self.token = token self.site = site self.log_level = log_level self.force = force def describe_root(cwd=None): if not cwd: cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) tag = check_output_env(["git", "describe", "--abbrev=0"], cwd=cwd).rstrip() if PY3: tag = tag.decode("utf-8") return tag @pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir) if is_valid_dir(metadata_dir, dirname)]) def recipe(request): return os.path.join(metadata_dir, request.param) # This tests any of the folders in the test-recipes/metadata folder that don't start with _ def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") api.build(recipe, config=testing_config) def test_token_upload(testing_workdir, testing_metadata): folder_uuid = uuid.uuid4().hex # generated with conda_test_account user, command: # anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda' args = AnacondaClientArgs(specs="conda_build_test/test_token_upload_" + folder_uuid, token="co-143399b8-276e-48db-b43f-4a3de839a024", force=True) with pytest.raises(NotFound): show.main(args) testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid]) testing_metadata.config.token = args.token # the folder with the test recipe to upload api.build(testing_metadata) # make sure that the package is available (should raise if it doesn't) show.main(args) # clean up - we don't actually want this package to exist remove.main(args) # verify cleanup: with pytest.raises(NotFound): 
show.main(args) @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) def test_no_anaconda_upload_condarc(service_name, testing_workdir, testing_config, capfd): api.build(empty_sections, config=testing_config) output, error = capfd.readouterr() assert "Automatic uploading is disabled" in output, error def test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_path(m)[0] # missing hash because we set custom build string in meta.yaml test_path = os.path.join(testing_config.croot, testing_config.host_subdir, "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2") assert test_path == output def test_no_include_recipe_config_arg(testing_metadata): """Two ways to not include recipe: build/include_recipe: False in meta.yaml; or this. Former is tested with specific recipe.""" outputs = api.build(testing_metadata) assert package_has_file(outputs[0], "info/recipe/meta.yaml") # make sure that it is not there when the command line flag is passed testing_metadata.config.include_recipe = False testing_metadata.meta['build']['number'] = 2 # We cannot test packages without recipes as we cannot render them output_file = api.build(testing_metadata, notest=True)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): # first, make sure that the recipe is there by default. This test copied from above, but copied # as a sanity check here. outputs = api.build(testing_metadata) assert package_has_file(outputs[0], "info/recipe/meta.yaml") output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), config=testing_config)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") def test_early_abort(testing_config, capfd): """There have been some problems with conda-build dropping out early. Make sure we aren't causing them""" api.build(os.path.join(metadata_dir, '_test_early_abort'), config=testing_config) output, error = capfd.readouterr() assert "Hello World" in output def test_output_build_path_git_source(testing_workdir, testing_config): recipe_path = os.path.join(metadata_dir, "source_git_jinja2") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_paths(m)[0] _hash = m.hash_dependencies() test_path = os.path.join(testing_config.croot, testing_config.host_subdir, "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( sys.version_info.major, sys.version_info.minor, _hash)) assert output == test_path @pytest.mark.serial def test_build_with_no_activate_does_not_activate(): api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False, anaconda_upload=False) @pytest.mark.serial def test_build_with_activate_does_activate(): api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True, anaconda_upload=False) @pytest.mark.skipif(sys.platform == "win32", reason="no binary prefix manipulation done on windows.") def test_binary_has_prefix_files(testing_workdir, testing_config): api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config) def test_relative_path_git_versioning(testing_workdir, testing_config): # conda_build_test_recipe is a manual step. Clone it at the same level as # your conda-build source. 
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'conda_build_test_recipe')) tag = describe_root(cwd) output = api.get_output_file_path(os.path.join(metadata_dir, "_source_git_jinja2_relative_path"), config=testing_config)[0] assert tag in output def test_relative_git_url_git_versioning(testing_workdir, testing_config): cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'conda_build_test_recipe')) tag = describe_root(cwd) recipe = os.path.join(metadata_dir, "_source_git_jinja2_relative_git_url") output = api.get_output_file_path(recipe, config=testing_config)[0] assert tag in output def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, "_dirty_skip_section") testing_config.dirty = True api.build(recipe, config=testing_config) with pytest.raises(subprocess.CalledProcessError): testing_config.dirty = False api.build(recipe, config=testing_config) def dummy_executable(folder, exename): # empty prefix by default - extra bit at beginning of file if sys.platform == "win32": exename = exename + ".bat" dummyfile = os.path.join(folder, exename) if sys.platform == "win32": prefix = "@echo off\n" else: prefix = "#!/bin/bash\nexec 1>&2\n" with open(dummyfile, 'w') as f: f.write(prefix + """ echo ******* You have reached the dummy {}. It is likely there is a bug in echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 """.format(exename)) if sys.platform != "win32": import stat st = os.stat(dummyfile) os.chmod(dummyfile, st.st_mode | stat.S_IEXEC) return exename def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatch): # "hide" svn by putting a known bad one on PATH exename = dummy_executable(testing_workdir, "svn") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) FNULL = open(os.devnull, 'w') with pytest.raises(subprocess.CalledProcessError, message="Dummy svn was not executed"): check_call_env([exename, '--version'], stderr=FNULL) FNULL.close() env = dict(os.environ) env["PATH"] = os.pathsep.join([testing_workdir, env["PATH"]]) api.build(os.path.join(metadata_dir, '_checkout_tool_as_dependency'), config=testing_config) platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = set(["32", ] + platforms) compilers = ["2.7", "3.4", "3.5"] msvc_vers = ['9.0', '10.0', '14.0'] else: msvc_vers = [] compilers = [".".join([str(sys.version_info.major), str(sys.version_info.minor)])] @pytest.mark.skipif(sys.platform != "win32", reason="MSVC only on windows") @pytest.mark.parametrize("msvc_ver", msvc_vers) def test_build_msvc_compiler(msvc_ver, monkeypatch): # verify that the correct compiler is available cl_versions = {"9.0": 15, "10.0": 16, "11.0": 17, "12.0": 18, "14.0": 19} monkeypatch.setenv('CONDATEST_MSVC_VER', msvc_ver) monkeypatch.setenv('CL_EXE_VERSION', str(cl_versions[msvc_ver])) try: # Always build Python 2.7 - but set MSVC version manually via Jinja template api.build(os.path.join(metadata_dir, '_build_msvc_compiler'), python="2.7") except: raise finally: del os.environ['CONDATEST_MSVC_VER'] del os.environ['CL_EXE_VERSION'] @pytest.mark.parametrize("platform", platforms) @pytest.mark.parametrize("target_compiler", compilers) def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config): testing_config.variant['python'] = target_compiler api.build(os.path.join(metadata_dir, '_cmake_generator'), 
config=testing_config) @pytest.mark.skipif(sys.platform == "win32", reason="No windows symlinks") def test_symlink_fail(testing_workdir, testing_config, capfd): with pytest.raises((SystemExit, FileNotFoundError)): api.build(os.path.join(fail_dir, "symlinks"), config=testing_config) # output, error = capfd.readouterr() # assert error.count("Error") == 6, "did not find appropriate count of Error in: " + error def test_pip_in_meta_yaml_fail(testing_workdir, testing_config): with pytest.raises(ValueError) as exc: api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config) assert "environment.yml" in str(exc) def test_recursive_fail(testing_workdir, testing_config): with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)) as exc: api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config) # indentation critical here. If you indent this, and the exception is not raised, then # the exc variable here isn't really completely created and shows really strange errors: # AttributeError: 'ExceptionInfo' object has no attribute 'typename' assert "recursive-build2" in str(exc.value) def test_jinja_typo(testing_workdir, testing_config): with pytest.raises(SystemExit) as exc: api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config) assert "GIT_DSECRIBE_TAG" in exc.exconly() @pytest.mark.serial def test_skip_existing(testing_workdir, testing_config, capfd): # build the recipe first api.build(empty_sections, config=testing_config) api.build(empty_sections, config=testing_config, skip_existing=True) output, error = capfd.readouterr() assert "are already built" in output @pytest.mark.serial def test_skip_existing_url(testing_metadata, testing_workdir, capfd): # make sure that it is built outputs = api.build(testing_metadata) # Copy our package into some new folder output_dir = os.path.join(testing_workdir, 'someoutput') platform = os.path.join(output_dir, testing_metadata.config.host_subdir) os.makedirs(platform) copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0]))) # create the index so conda can find the file api.update_index(platform, config=testing_metadata.config) # HACK: manually create noarch location there, so that conda 4.3.2+ considers a valid channel noarch = os.path.join(output_dir, 'noarch') os.makedirs(noarch) api.update_index(noarch, config=testing_metadata.config) testing_metadata.config.skip_existing = True testing_metadata.config.channel_urls = [url_path(output_dir)] api.build(testing_metadata) output, error = capfd.readouterr() assert "are already built" in output assert url_path(testing_metadata.config.croot) in output def test_failed_tests_exit_build(testing_workdir, testing_config): """https://github.com/conda/conda-build/issues/1112""" with pytest.raises(SystemExit) as exc: api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config) assert 'TESTS FAILED' in str(exc) def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): """ If run reqs are blank, then conda-build looks for requirements.txt in the recipe folder. 
There has been a report of issue with unsatisfiable requirements at https://github.com/Anaconda-Platform/anaconda-server/issues/2565 This test attempts to reproduce those conditions: a channel other than defaults with this requirements.txt """ testing_config.channel_urls = ('conda_build_test', ) api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config) @pytest.mark.serial def test_compileall_compiles_all_good_files(testing_workdir, testing_config): output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] good_files = ['f1.py', 'f3.py'] bad_file = 'f2_bad.py' for f in good_files: assert package_has_file(output, f) # look for the compiled file also assert package_has_file(output, add_mangling(f)) assert package_has_file(output, bad_file) assert not package_has_file(output, add_mangling(bad_file)) def test_render_setup_py_old_funcname(testing_workdir, testing_config, caplog): api.build(os.path.join(metadata_dir, "_source_setuptools"), config=testing_config) assert "Deprecation notice: the load_setuptools function has been renamed to " in caplog.text @pytest.mark.skipif(not on_win, reason="only Windows is insane enough to have backslashes in paths") def test_backslash_in_always_include_files_path(testing_config): api.build(os.path.join(metadata_dir, '_backslash_in_include_files')) with pytest.raises(RuntimeError): api.build(os.path.join(fail_dir, 'backslash_in_include_files')) def test_build_metadata_object(testing_metadata): api.build(testing_metadata) @pytest.mark.skipif(on_win, reason="fortran compilers on win are hard.") def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') m = api.render(recipe_path, config=testing_config, numpy="1.11")[0][0] _hash = m.hash_dependencies() assert os.path.basename(api.get_output_file_path(m)[0]) == \ "load_setup_py_test-1.0a1-np111py{0}{1}{2}_1.tar.bz2".format( sys.version_info.major, sys.version_info.minor, _hash) def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monkeypatch): """ A multi-part test encompassing the following checks: 1. That git submodules identified with both relative and absolute URLs can be mirrored and cloned. 2. That changes pushed to the original repository are updated in the mirror and finally reflected in the package version and filename via `GIT_DESCRIBE_TAG`. 3. That `source.py` is using `check_call_env` and `check_output_env` and that those functions are using tools from the build env. """ toplevel = os.path.join(testing_workdir, 'toplevel') os.mkdir(toplevel) relative_sub = os.path.join(testing_workdir, 'relative_sub') os.mkdir(relative_sub) absolute_sub = os.path.join(testing_workdir, 'absolute_sub') os.mkdir(absolute_sub) sys_git_env = os.environ.copy() sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build' sys_git_env['GIT_AUTHOR_EMAIL'] = '[email protected]' sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build' sys_git_env['GIT_COMMITTER_EMAIL'] = '[email protected]' # Find the git executable before putting our dummy one on PATH. git = find_executable('git') # Put the broken git on os.environ["PATH"] exename = dummy_executable(testing_workdir, 'git') monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) # .. and ensure it gets run (and fails). FNULL = open(os.devnull, 'w') # Strangely .. 
# stderr=FNULL suppresses the output from echo on OS X whereas # stdout=FNULL suppresses the output from echo on Windows with pytest.raises(subprocess.CalledProcessError, message="Dummy git was not executed"): check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL) FNULL.close() for tag in range(2): os.chdir(absolute_sub) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('absolute', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'absolute'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)], env=sys_git_env) os.chdir(relative_sub) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('relative', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'relative'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)], env=sys_git_env) os.chdir(toplevel) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('toplevel', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'toplevel'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)], env=sys_git_env) if tag == 0: check_call_env([git, 'submodule', 'add', convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'], env=sys_git_env) check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'], env=sys_git_env) else: # Once we use a more recent Git for Windows than 2.6.4 on Windows or m2-git we # can change this to `git submodule update --recursive`. check_call_env([git, 'submodule', 'foreach', git, 'pull'], env=sys_git_env) check_call_env([git, 'commit', '-am', 'added submodules@{}'.format(tag)], env=sys_git_env) check_call_env([git, 'tag', '-a', str(tag), '-m', 'tag {}'.format(tag)], env=sys_git_env) # It is possible to use `Git for Windows` here too, though you *must* not use a different # (type of) git than the one used above to add the absolute submodule, because .gitmodules # stores the absolute path and that is not interchangeable between MSYS2 and native Win32. # # Also, git is set to False here because it needs to be rebuilt with the longer prefix. As # things stand, my _b_env folder for this test contains more than 80 characters. 
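    # For orientation, the OrderedDict assembled below renders (via yaml.dump) to a
    # meta.yaml of roughly this shape; this is an illustrative sketch only, the exact
    # quoting, selectors, and script/test sections come from the dict itself:
    #
    #     package:
    #       name: relative_submodules
    #       version: '{{ GIT_DESCRIBE_TAG }}'
    #     source:
    #       git_url: <toplevel working dir>
    #       git_tag: '<tag>'
    #     requirements:
    #       build:
    #         - m2-git          # [win]
    #         - m2-filesystem   # [win]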
requirements = ('requirements', OrderedDict([ ('build', ['git # [False]', 'm2-git # [win]', 'm2-filesystem # [win]'])])) recipe_dir = os.path.join(testing_workdir, 'recipe') if not os.path.exists(recipe_dir): os.makedirs(recipe_dir) filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml') data = OrderedDict([ ('package', OrderedDict([ ('name', 'relative_submodules'), ('version', '{{ GIT_DESCRIBE_TAG }}')])), ('source', OrderedDict([ ('git_url', toplevel), ('git_tag', str(tag))])), requirements, ('build', OrderedDict([ ('script', ['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > ' '%PREFIX%\\summaries.txt # [win]', 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > ' '$PREFIX/summaries.txt # [not win]']) ])), ('test', OrderedDict([ ('commands', ['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]' .format(tag, tag), 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]', 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]' .format(tag, tag), 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]']) ])) ]) with open(filename, 'w') as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) # Reset the path because our broken, dummy `git` would cause `render_recipe` # to fail, while no `git` will cause the build_dependencies to be installed. monkeypatch.undo() # This will (after one spin round the loop) install and run 'git' with the # build env prepended to os.environ[] metadata = api.render(testing_workdir, config=testing_config)[0][0] output = api.get_output_file_path(metadata, config=testing_config)[0] assert ("relative_submodules-{}-".format(tag) in output) api.build(metadata, config=testing_config) def test_noarch(testing_workdir): filename = os.path.join(testing_workdir, 'meta.yaml') for noarch in (False, True): data = OrderedDict([ ('package', OrderedDict([ ('name', 'test'), ('version', '0.0.0')])), ('build', OrderedDict([ ('noarch', str(noarch))])) ]) with open(filename, 'w') as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) output = api.get_output_file_path(testing_workdir)[0] assert (os.path.sep + "noarch" + os.path.sep in output or not noarch) assert (os.path.sep + "noarch" + os.path.sep not in output or noarch) def test_disable_pip(testing_config, testing_metadata): testing_metadata.config.disable_pip = True testing_metadata.meta['build']['script'] = 'python -c "import pip; print(pip.__version__)"' with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) testing_metadata.meta['build']['script'] = ('python -c "import setuptools; ' 'print(setuptools.__version__)"') with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="rpath fixup only done on Linux so far.") def test_rpath_linux(testing_config): api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config) def test_noarch_none_value(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, "_noarch_none") with pytest.raises(exceptions.CondaBuildException): api.build(recipe, config=testing_config) def test_noarch_foo_value(testing_config): outputs = api.build(os.path.join(metadata_dir, "noarch_generic"), config=testing_config) metadata = json.loads(package_has_file(outputs[0], 'info/index.json').decode()) assert metadata['noarch'] == "generic" def test_about_json_content(testing_metadata): outputs 
= api.build(testing_metadata) about = json.loads(package_has_file(outputs[0], 'info/about.json').decode()) assert 'conda_version' in about and about['conda_version'] == conda.__version__ assert 'conda_build_version' in about and about['conda_build_version'] == __version__ assert 'channels' in about and about['channels'] try: assert 'env_vars' in about and about['env_vars'] except AssertionError: # new versions of conda support this, so we should raise errors. if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'): raise else: pass assert 'root_pkgs' in about and about['root_pkgs'] @pytest.mark.xfail(parse_version(conda.__version__) < parse_version("4.3.14"), reason="new noarch supported starting with conda 4.3.14") def test_noarch_python_with_tests(testing_config): recipe = os.path.join(metadata_dir, "_noarch_python_with_tests") api.build(recipe, config=testing_config) def test_noarch_python_1(testing_config): output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0] assert package_has_file(output, 'info/files') is not '' extra = json.loads(package_has_file(output, 'info/link.json').decode()) assert 'noarch' in extra assert 'entry_points' in extra['noarch'] assert 'type' in extra['noarch'] assert 'package_metadata_version' in extra def test_legacy_noarch_python(testing_config): output = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"), config=testing_config)[0] # make sure that the package is going into the noarch folder assert os.path.basename(os.path.dirname(output)) == 'noarch' @pytest.mark.skipif(parse_version(conda.__version__) < parse_version("4.5"), reason="full preferred env implementation deferred to conda 4.5") def test_preferred_env(testing_config): recipe = os.path.join(metadata_dir, "_preferred_env") output = api.build(recipe, config=testing_config)[0] extra = json.loads(package_has_file(output, 'info/link.json').decode()) assert 'preferred_env' in extra assert 'name' in extra['preferred_env'] assert 'executable_paths' in extra['preferred_env'] exe_paths = extra['preferred_env']['executable_paths'] if on_win: assert exe_paths == ['Scripts/exepath1.bat', 'Scripts/exepath2.bat'] else: assert exe_paths == ['bin/exepath1', 'bin/exepath2'] assert 'package_metadata_version' in extra @pytest.mark.serial def test_skip_compile_pyc(testing_config): outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) tf = tarfile.open(outputs[0]) pyc_count = 0 for f in tf.getmembers(): filename = os.path.basename(f.name) _, ext = os.path.splitext(filename) basename = filename.split('.', 1)[0] if basename == 'skip_compile_pyc': assert not ext == '.pyc', "a skip_compile_pyc .pyc was compiled: {}".format(filename) if ext == '.pyc': assert basename == 'compile_pyc', "an unexpected .pyc was compiled: {}".format(filename) pyc_count = pyc_count + 1 assert pyc_count == 2, "there should be 2 .pyc files, instead there were {}".format(pyc_count) def test_detect_binary_files_with_prefix(testing_config): outputs = api.build(os.path.join(metadata_dir, "_detect_binary_files_with_prefix"), config=testing_config) matches = [] with tarfile.open(outputs[0]) as tf: has_prefix = tf.extractfile('info/has_prefix') contents = [p.strip().decode('utf-8') for p in has_prefix.readlines()] has_prefix.close() matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or entry.endswith('"binary-has-prefix"')] assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix" assert ' binary ' in matches[0], 
"binary-has-prefix not recorded as binary in info/has_prefix" def test_skip_detect_binary_files_with_prefix(testing_config): recipe = os.path.join(metadata_dir, "_skip_detect_binary_files_with_prefix") outputs = api.build(recipe, config=testing_config) matches = [] with tarfile.open(outputs[0]) as tf: try: has_prefix = tf.extractfile('info/has_prefix') contents = [p.strip().decode('utf-8') for p in has_prefix.readlines()] has_prefix.close() matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or entry.endswith('"binary-has-prefix"')] except: pass assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite:" \ "build/detect_binary_files_with_prefix: false" def test_fix_permissions(testing_config): recipe = os.path.join(metadata_dir, "fix_permissions") outputs = api.build(recipe, config=testing_config) with tarfile.open(outputs[0]) as tf: for f in tf.getmembers(): assert f.mode & 0o444 == 0o444, "tar member '{}' has invalid (read) mode".format(f.name) @pytest.mark.skipif(not on_win, reason="windows-only functionality") @pytest.mark.parametrize('recipe_name', ["_script_win_creates_exe", "_script_win_creates_exe_garbled"]) def test_script_win_creates_exe(testing_config, recipe_name): recipe = os.path.join(metadata_dir, recipe_name) outputs = api.build(recipe, config=testing_config) assert package_has_file(outputs[0], 'Scripts/test-script.exe') assert package_has_file(outputs[0], 'Scripts/test-script-script.py') def test_output_folder_moves_file(testing_metadata, testing_workdir): testing_metadata.config.output_folder = testing_workdir outputs = api.build(testing_metadata, no_test=True) assert outputs[0].startswith(testing_workdir) def test_info_files_json(testing_config): outputs = api.build(os.path.join(metadata_dir, "ignore_some_prefix_files"), config=testing_config) assert package_has_file(outputs[0], "info/paths.json") with tarfile.open(outputs[0]) as tf: data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8')) fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link", "prefix_placeholder", "inode_paths"] for key in data.keys(): assert key in ['paths', 'paths_version'] for paths in data.get('paths'): for field in paths.keys(): assert field in fields assert len(data.get('paths')) == 2 for file in data.get('paths'): for key in file.keys(): assert key in fields short_path = file.get("_path") if short_path == "test.sh" or short_path == "test.bat": assert file.get("prefix_placeholder") is not None assert file.get("file_mode") is not None else: assert file.get("prefix_placeholder") is None assert file.get("file_mode") is None def test_build_expands_wildcards(mocker, testing_workdir): build_tree = mocker.patch("conda_build.build.build_tree") config = api.Config() files = ['abc', 'acb'] for f in files: os.makedirs(f) with open(os.path.join(f, 'meta.yaml'), 'w') as fh: fh.write('\n') api.build(["a*"], config=config) output = [os.path.join(os.getcwd(), path, 'meta.yaml') for path in files] build_tree.assert_called_once_with(output, build_only=False, config=mocker.ANY, need_source_download=True, notest=False, post=None, variants=None) @pytest.mark.serial @pytest.mark.parametrize('set_build_id', [True, False]) def test_remove_workdir_default(testing_config, caplog, set_build_id): recipe = os.path.join(metadata_dir, '_keep_work_dir') # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. 
This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, set_build_id=set_build_id) assert not glob(os.path.join(metadata.config.work_dir, '*')) @pytest.mark.serial def test_keep_workdir_and_dirty_reuse(testing_config, capfd): recipe = os.path.join(metadata_dir, '_keep_work_dir') # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one # another metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] workdir = metadata.config.work_dir api.build(metadata) out, err = capfd.readouterr() assert glob(os.path.join(metadata.config.work_dir, '*')) # test that --dirty reuses the same old folder metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] assert workdir == metadata.config.work_dir # test that without --dirty, we don't reuse the folder metadata = api.render(recipe, config=testing_config)[0][0] assert workdir != metadata.config.work_dir testing_config.clean() def test_workdir_removal_warning(testing_config, caplog): recipe = os.path.join(metadata_dir, '_test_uses_src_dir') with pytest.raises(ValueError) as exc: api.build(recipe, config=testing_config) assert "work dir is removed" in str(exc) # @pytest.mark.serial # @pytest.mark.skipif(not sys.platform.startswith('linux'), # reason="cross compiler packages created only on Linux right now") # @pytest.mark.xfail(VersionOrder(conda.__version__) < VersionOrder('4.3.2'), # reason="not completely implemented yet") # def test_cross_compiler(testing_workdir, testing_config, capfd): # # TODO: testing purposes. Package from @mingwandroid's channel, copied to conda_build_test # testing_config.channel_urls = ('conda_build_test', ) # # activation is necessary to set the appropriate toolchain env vars # testing_config.activate = True # # testing_config.debug = True # recipe_dir = os.path.join(metadata_dir, '_cross_helloworld') # output = api.build(recipe_dir, config=testing_config)[0] # assert output.startswith(os.path.join(testing_config.croot, 'linux-imx351uc')) @pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") def test_append_python_app_osx(testing_config): """Recipes that use osx_is_app need to have python.app in their runtime requirements. conda-build will add it if it's missing.""" recipe = os.path.join(metadata_dir, '_osx_is_app_missing_python_app') # tests will fail here if python.app is not added to the run reqs by conda-build, because # without it, pythonw will be missing. api.build(recipe, config=testing_config) # Not sure about this behavior. Basically, people need to realize that if they # start with a recipe from disk, they should not then alter the metadata # object. Later reparsing will clobber their edits to the object. The # complicated thing is that these edits are indistinguishable from Jinja2 # templating doing its normal thing. # def test_clobbering_manually_set_metadata_raises(testing_metadata, testing_workdir): # api.output_yaml(testing_metadata, 'meta.yaml') # metadata = api.render(testing_workdir)[0][0] # # make the package meta dict out of sync with file contents # metadata.meta['package']['name'] = 'steve' # # re-render happens as part of build. 
We should see an error about clobbering our customized # # meta dict # with pytest.raises(ValueError): # api.build(metadata) @pytest.mark.serial def test_run_exports(testing_metadata, testing_config, testing_workdir): api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config) api.build(os.path.join(metadata_dir, '_run_exports_implicit_weak'), config=testing_config) # run_exports is tricky. We mostly only ever want things in "host". Here are the conditions: # 1. only build section present (legacy recipe). Here, use run_exports from build. testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run'] # 2. host present. Use run_exports from host, ignore 'weak' ones from build. All are # weak by default. testing_metadata.meta['requirements']['build'] = ['test_has_run_exports_implicit_weak'] testing_metadata.meta['requirements']['host'] = ['python'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert 'weak_pinned_package 2.0.*' not in m.meta['requirements']['run'] # 3. host present, and deps in build have "strong" run_exports section. use host, add # in "strong" from build. testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] testing_metadata.meta['requirements']['host'] = ['test_has_run_exports_implicit_weak'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert any('strong_pinned_package 1.0' in req for req in m.meta['requirements']['host']) assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] # weak one from test_has_run_exports should be excluded, since it is a build dep assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run'] assert 'weak_pinned_package 2.0.*' in m.meta['requirements']['run'] @pytest.mark.serial def test_ignore_run_exports(testing_metadata, testing_config): # need to clear conda's index, or else we somehow pick up the test_run_exports folder # above for our package here. 
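    # For context, run_exports is declared in the exporting package's recipe. A
    # hedged sketch of such a section (package names mirror the pins asserted on
    # above; the actual _run_exports test recipe is not quoted here):
    #
    #     build:
    #       run_exports:
    #         strong:
    #           - strong_pinned_package 1.0.*
    #         weak:
    #           - weak_pinned_package 1.0.*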
api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config) testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package'] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) assert 'downstream_pinned_package 1.0' not in m.meta['requirements']['run'] def test_pin_subpackage_exact(testing_config): recipe = os.path.join(metadata_dir, '_pin_subpackage_exact') ms = api.render(recipe, config=testing_config) assert any(re.match(r'run_exports_subpkg 1.0 h[a-f0-9]{%s}_0' % testing_config.hash_length, req) for (m, _, _) in ms for req in m.meta['requirements']['run']) api.build(recipe, config=testing_config) @pytest.mark.skipif(sys.platform != 'linux', reason="xattr code written here is specific to linux") def test_copy_read_only_file_with_xattr(testing_config, testing_workdir): src_recipe = os.path.join(metadata_dir, '_xattr_copy') recipe = os.path.join(testing_workdir, '_xattr_copy') copy_into(src_recipe, recipe) # file is r/w for owner, but we change it to 400 after setting the attribute ro_file = os.path.join(recipe, 'mode_400_file') subprocess.check_call('setfattr -n user.attrib -v somevalue {}'.format(ro_file), shell=True) subprocess.check_call('chmod 400 {}'.format(ro_file), shell=True) api.build(recipe, config=testing_config) @pytest.mark.serial def test_env_creation_fail_exits_build(testing_config): recipe = os.path.join(metadata_dir, '_post_link_exits_after_retry') with pytest.raises((RuntimeError, LinkError, CondaError)): api.build(recipe, config=testing_config) recipe = os.path.join(metadata_dir, '_post_link_exits_tests') with pytest.raises((RuntimeError, LinkError, CondaError)): api.build(recipe, config=testing_config) @pytest.mark.serial def test_recursion_packages(testing_config): """Two packages that need to be built are listed in the recipe make sure that both get built before the one needing them gets built.""" recipe = os.path.join(metadata_dir, '_recursive-build-two-packages') api.build(recipe, config=testing_config) @pytest.mark.serial def test_recursion_layers(testing_config): """go two 'hops' - try to build a, but a needs b, so build b first, then come back to a""" recipe = os.path.join(metadata_dir, '_recursive-build-two-layers') api.build(recipe, config=testing_config) @pytest.mark.skipif(sys.platform != 'win32', reason=("spaces break openssl prefix " "replacement on *nix")) def test_croot_with_spaces(testing_metadata, testing_workdir): testing_metadata.config.croot = os.path.join(testing_workdir, "space path") api.build(testing_metadata) def test_unknown_selectors(testing_config): recipe = os.path.join(metadata_dir, 'unknown_selector') api.build(recipe, config=testing_config) def test_extract_tarball_with_unicode_filename(testing_config): """See https://github.com/conda/conda-build/pull/1779""" recipe = os.path.join(metadata_dir, '_unicode_in_tarball') api.build(recipe, config=testing_config) @pytest.mark.serial def test_failed_recipe_leaves_folders(testing_config, testing_workdir): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] locks = get_conda_operation_locks(m.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): api.build(m) assert os.path.isdir(m.config.build_folder), 'build folder was removed' assert os.listdir(m.config.build_folder), 'build folder has no files' # make sure that it does not leave lock files, though, as these cause permission 
errors on # centralized installations any_locks = False locks_list = set() for lock in locks: if os.path.isfile(lock.lock_file): any_locks = True dest_path = base64.b64decode(os.path.basename(lock.lock_file)) if PY3 and hasattr(dest_path, 'decode'): dest_path = dest_path.decode() locks_list.add((lock.lock_file, dest_path)) assert not any_locks, "remaining locks:\n{}".format('\n'.join('->'.join((l, r)) for (l, r) in locks_list)) def test_only_r_env_vars_defined(testing_config): recipe = os.path.join(metadata_dir, '_r_env_defined') testing_config.channel_urls = ('r', ) api.build(recipe, config=testing_config) def test_only_perl_env_vars_defined(testing_config): recipe = os.path.join(metadata_dir, '_perl_env_defined') testing_config.channel_urls = ('c3i_test', ) api.build(recipe, config=testing_config) @pytest.mark.skipif(on_win, reason='no lua package on win') def test_only_lua_env(testing_config): recipe = os.path.join(metadata_dir, '_lua_env_defined') testing_config.channel_urls = ('conda-forge', ) testing_config.prefix_length = 80 testing_config.set_build_id = False api.build(recipe, config=testing_config) def test_run_constrained_stores_constrains_info(testing_config): recipe = os.path.join(metadata_dir, '_run_constrained') out_file = api.build(recipe, config=testing_config)[0] info_contents = json.loads(package_has_file(out_file, 'info/index.json')) assert 'constrains' in info_contents assert len(info_contents['constrains']) == 1 assert info_contents['constrains'][0] == 'bzip2 1.*' @pytest.mark.serial def test_no_locking(testing_config): recipe = os.path.join(metadata_dir, 'source_git_jinja2') api.update_index(os.path.join(testing_config.croot, testing_config.subdir), config=testing_config) api.build(recipe, config=testing_config, locking=False) def test_test_dependencies(testing_workdir, testing_config): recipe = os.path.join(fail_dir, 'check_test_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) assert 'Unsatisfiable dependencies for platform ' in str(e.value) assert 'pytest-package-does-not-exist' in str(e.value) def test_runtime_dependencies(testing_workdir, testing_config): recipe = os.path.join(fail_dir, 'check_runtime_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) assert 'Unsatisfiable dependencies for platform ' in str(e.value) assert 'some-nonexistent-package1' in str(e.value) def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata): testing_metadata.config.anaconda_upload = True del testing_metadata.meta['test'] api.output_yaml(testing_metadata, 'meta.yaml') call = mocker.patch.object(conda_build.build.subprocess, 'call') cc_conda_build['force_upload'] = False pkg = api.build(testing_workdir) assert call.called_once_with(['anaconda', 'upload', pkg]) del cc_conda_build['force_upload'] pkg = api.build(testing_workdir) assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) def test_setup_py_data_in_env(testing_config): recipe = os.path.join(metadata_dir, '_setup_py_data_in_env') # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic with pytest.raises(subprocess.CalledProcessError): api.build(recipe, config=testing_config, python='3.4') def test_numpy_xx(testing_config): recipe = os.path.join(metadata_dir, '_numpy_xx') api.build(recipe, config=testing_config, numpy='1.12') def 
test_numpy_xx_host(testing_config): recipe = os.path.join(metadata_dir, '_numpy_xx_host') api.build(recipe, config=testing_config, numpy='1.12') def test_python_xx(testing_config): recipe = os.path.join(metadata_dir, '_python_xx') api.build(recipe, config=testing_config, python='3.4') def test_indirect_numpy_dependency(testing_metadata): testing_metadata.meta['requirements']['build'] = ['arrow-cpp 0.5.*'] testing_metadata.config.channel_urls = ['conda-forge'] api.build(testing_metadata, numpy=1.13) def test_dependencies_with_notest(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, '_test_dependencies') api.build(recipe, config=testing_config, notest=True) with pytest.raises(DependencyNeedsBuildingError) as excinfo: api.build(recipe, config=testing_config, notest=False) assert 'Unsatisfiable dependencies for platform' in str(excinfo.value) assert 'somenonexistentpackage1' in str(excinfo.value) def test_source_cache_build(testing_workdir): recipe = os.path.join(metadata_dir, 'source_git_jinja2') config = api.Config(src_cache_root=testing_workdir) api.build(recipe, notest=True, config=config) git_cache_directory = '{}/git_cache' .format(testing_workdir) assert os.path.isdir(git_cache_directory) files = [filename for _, _, filenames in os.walk(git_cache_directory) for filename in filenames] assert len(files) > 0 def test_copy_test_source_files(testing_config): recipe = os.path.join(metadata_dir, '_test_test_source_files') filenames = set() for copy in (False, True): testing_config.copy_test_source_files = copy outputs = api.build(recipe, notest=False, config=testing_config) filenames.add(os.path.basename(outputs[0])) tf = tarfile.open(outputs[0]) found = False for f in tf.getmembers(): if f.name.startswith('info/test/'): found = True break if found: assert copy, "'info/test/' found in tar.bz2 but not copying test source files" else: assert not copy, "'info/test/' not found in tar.bz2 but copying test source files" assert len(filenames) == 2, "copy_test_source_files does not modify the build hash but should" def test_pin_depends(testing_config): """purpose of 'record' argument is to put a 'requires' file that records pinned run dependencies """ recipe = os.path.join(metadata_dir, '_pin_depends_record') m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record will not show it in record assert not any(re.search('python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) output = api.build(m, config=testing_config)[0] requires = package_has_file(output, 'info/requires') assert requires if PY3 and hasattr(requires, 'decode'): requires = requires.decode() assert re.search('python\=[23]\.', requires), "didn't find pinned python in info/requires" def test_failed_patch_exits_build(testing_config): with pytest.raises(RuntimeError): api.build(os.path.join(metadata_dir, '_bad_patch'), config=testing_config) def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config): # unsatisfiable; also not buildable (test_a recipe version is 2.0) testing_config.variant['test_a'] = "1.0" recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b') with pytest.raises(DependencyNeedsBuildingError): api.build(recipe, config=testing_config) # passes now, because package can be built, or is already built. Doesn't matter which. testing_config.variant['test_a'] = "2.0" api.build(recipe, config=testing_config)
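# A minimal, self-contained sketch of driving conda_build.api the way the tests
# above do; the recipe path and cache directory defaults are placeholders, not
# paths used by this suite. The leading underscore keeps pytest from collecting
# it as a test.
def _example_api_build(recipe_dir='/path/to/some_recipe', cache_dir='/tmp/src_cache'):
    # src_cache_root and notest are the same knobs exercised in
    # test_source_cache_build and the notest=True tests above.
    example_config = api.Config(src_cache_root=cache_dir)
    return api.build(recipe_dir, notest=True, config=example_config)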
avg_line_length: 43.726817
max_line_length: 102
alphanum_fraction: 0.690701

content_no_comment:
import base64 from collections import OrderedDict from glob import glob import logging import os import re import subprocess import sys import json import uuid import conda from conda_build.conda_interface import PY3, url_path, LinkError, CondaError, cc_conda_build import conda_build from binstar_client.commands import remove, show from binstar_client.errors import NotFound from pkg_resources import parse_version import pytest import yaml import tarfile from conda_build import api, exceptions, __version__ from conda_build.build import VersionOrder from conda_build.render import finalize_metadata from conda_build.utils import (copy_into, on_win, check_call_env, convert_path_for_cygwin_or_msys2, package_has_file, check_output_env, get_conda_operation_locks) from conda_build.os_utils.external import find_executable from conda_build.exceptions import DependencyNeedsBuildingError from .utils import is_valid_dir, metadata_dir, fail_dir, add_mangling, FileNotFoundError empty_sections = os.path.join(metadata_dir, "empty_sections") def represent_ordereddict(dumper, data): value = [] for item_key, item_value in data.items(): node_key = dumper.represent_data(item_key) node_value = dumper.represent_data(item_value) value.append((node_key, node_value)) return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', value) yaml.add_representer(OrderedDict, represent_ordereddict) class AnacondaClientArgs(object): def __init__(self, specs, token=None, site=None, log_level=logging.INFO, force=False): from binstar_client.utils import parse_specs self.specs = [parse_specs(specs)] self.spec = self.specs[0] self.token = token self.site = site self.log_level = log_level self.force = force def describe_root(cwd=None): if not cwd: cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), "..")) tag = check_output_env(["git", "describe", "--abbrev=0"], cwd=cwd).rstrip() if PY3: tag = tag.decode("utf-8") return tag @pytest.fixture(params=[dirname for dirname in os.listdir(metadata_dir) if is_valid_dir(metadata_dir, dirname)]) def recipe(request): return os.path.join(metadata_dir, request.param) def test_recipe_builds(recipe, testing_config, testing_workdir, monkeypatch): # These variables are defined solely for testing purposes, # so they can be checked within build scripts monkeypatch.setenv("CONDA_TEST_VAR", "conda_test") monkeypatch.setenv("CONDA_TEST_VAR_2", "conda_test_2") api.build(recipe, config=testing_config) def test_token_upload(testing_workdir, testing_metadata): folder_uuid = uuid.uuid4().hex # generated with conda_test_account user, command: # anaconda auth --create --name CONDA_BUILD_UPLOAD_TEST --scopes 'api repos conda' args = AnacondaClientArgs(specs="conda_build_test/test_token_upload_" + folder_uuid, token="co-143399b8-276e-48db-b43f-4a3de839a024", force=True) with pytest.raises(NotFound): show.main(args) testing_metadata.meta['package']['name'] = '_'.join([testing_metadata.name(), folder_uuid]) testing_metadata.config.token = args.token # the folder with the test recipe to upload api.build(testing_metadata) # make sure that the package is available (should raise if it doesn't) show.main(args) remove.main(args) # verify cleanup: with pytest.raises(NotFound): show.main(args) @pytest.mark.parametrize("service_name", ["binstar", "anaconda"]) def test_no_anaconda_upload_condarc(service_name, testing_workdir, testing_config, capfd): api.build(empty_sections, config=testing_config) output, error = capfd.readouterr() assert "Automatic uploading is disabled" in output, error def 
test_git_describe_info_on_branch(testing_config): recipe_path = os.path.join(metadata_dir, "_git_describe_number_branch") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_path(m)[0] # missing hash because we set custom build string in meta.yaml test_path = os.path.join(testing_config.croot, testing_config.host_subdir, "git_describe_number_branch-1.20.2.0-1_g82c6ba6.tar.bz2") assert test_path == output def test_no_include_recipe_config_arg(testing_metadata): outputs = api.build(testing_metadata) assert package_has_file(outputs[0], "info/recipe/meta.yaml") # make sure that it is not there when the command line flag is passed testing_metadata.config.include_recipe = False testing_metadata.meta['build']['number'] = 2 # We cannot test packages without recipes as we cannot render them output_file = api.build(testing_metadata, notest=True)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") def test_no_include_recipe_meta_yaml(testing_metadata, testing_config): # first, make sure that the recipe is there by default. This test copied from above, but copied # as a sanity check here. outputs = api.build(testing_metadata) assert package_has_file(outputs[0], "info/recipe/meta.yaml") output_file = api.build(os.path.join(metadata_dir, '_no_include_recipe'), config=testing_config)[0] assert not package_has_file(output_file, "info/recipe/meta.yaml") def test_early_abort(testing_config, capfd): api.build(os.path.join(metadata_dir, '_test_early_abort'), config=testing_config) output, error = capfd.readouterr() assert "Hello World" in output def test_output_build_path_git_source(testing_workdir, testing_config): recipe_path = os.path.join(metadata_dir, "source_git_jinja2") m = api.render(recipe_path, config=testing_config)[0][0] output = api.get_output_file_paths(m)[0] _hash = m.hash_dependencies() test_path = os.path.join(testing_config.croot, testing_config.host_subdir, "conda-build-test-source-git-jinja2-1.20.2-py{}{}{}_0_g262d444.tar.bz2".format( sys.version_info.major, sys.version_info.minor, _hash)) assert output == test_path @pytest.mark.serial def test_build_with_no_activate_does_not_activate(): api.build(os.path.join(metadata_dir, '_set_env_var_no_activate_build'), activate=False, anaconda_upload=False) @pytest.mark.serial def test_build_with_activate_does_activate(): api.build(os.path.join(metadata_dir, '_set_env_var_activate_build'), activate=True, anaconda_upload=False) @pytest.mark.skipif(sys.platform == "win32", reason="no binary prefix manipulation done on windows.") def test_binary_has_prefix_files(testing_workdir, testing_config): api.build(os.path.join(metadata_dir, '_binary_has_prefix_files'), config=testing_config) def test_relative_path_git_versioning(testing_workdir, testing_config): # conda_build_test_recipe is a manual step. Clone it at the same level as # your conda-build source. 
cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'conda_build_test_recipe')) tag = describe_root(cwd) output = api.get_output_file_path(os.path.join(metadata_dir, "_source_git_jinja2_relative_path"), config=testing_config)[0] assert tag in output def test_relative_git_url_git_versioning(testing_workdir, testing_config): cwd = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'conda_build_test_recipe')) tag = describe_root(cwd) recipe = os.path.join(metadata_dir, "_source_git_jinja2_relative_git_url") output = api.get_output_file_path(recipe, config=testing_config)[0] assert tag in output def test_dirty_variable_available_in_build_scripts(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, "_dirty_skip_section") testing_config.dirty = True api.build(recipe, config=testing_config) with pytest.raises(subprocess.CalledProcessError): testing_config.dirty = False api.build(recipe, config=testing_config) def dummy_executable(folder, exename): # empty prefix by default - extra bit at beginning of file if sys.platform == "win32": exename = exename + ".bat" dummyfile = os.path.join(folder, exename) if sys.platform == "win32": prefix = "@echo off\n" else: prefix = "#!/bin/bash\nexec 1>&2\n" with open(dummyfile, 'w') as f: f.write(prefix + """ echo ******* You have reached the dummy {}. It is likely there is a bug in echo ******* conda that makes it not add the _build/bin directory onto the echo ******* PATH before running the source checkout tool exit -1 """.format(exename)) if sys.platform != "win32": import stat st = os.stat(dummyfile) os.chmod(dummyfile, st.st_mode | stat.S_IEXEC) return exename def test_checkout_tool_as_dependency(testing_workdir, testing_config, monkeypatch): # "hide" svn by putting a known bad one on PATH exename = dummy_executable(testing_workdir, "svn") monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) FNULL = open(os.devnull, 'w') with pytest.raises(subprocess.CalledProcessError, message="Dummy svn was not executed"): check_call_env([exename, '--version'], stderr=FNULL) FNULL.close() env = dict(os.environ) env["PATH"] = os.pathsep.join([testing_workdir, env["PATH"]]) api.build(os.path.join(metadata_dir, '_checkout_tool_as_dependency'), config=testing_config) platforms = ["64" if sys.maxsize > 2**32 else "32"] if sys.platform == "win32": platforms = set(["32", ] + platforms) compilers = ["2.7", "3.4", "3.5"] msvc_vers = ['9.0', '10.0', '14.0'] else: msvc_vers = [] compilers = [".".join([str(sys.version_info.major), str(sys.version_info.minor)])] @pytest.mark.skipif(sys.platform != "win32", reason="MSVC only on windows") @pytest.mark.parametrize("msvc_ver", msvc_vers) def test_build_msvc_compiler(msvc_ver, monkeypatch): # verify that the correct compiler is available cl_versions = {"9.0": 15, "10.0": 16, "11.0": 17, "12.0": 18, "14.0": 19} monkeypatch.setenv('CONDATEST_MSVC_VER', msvc_ver) monkeypatch.setenv('CL_EXE_VERSION', str(cl_versions[msvc_ver])) try: # Always build Python 2.7 - but set MSVC version manually via Jinja template api.build(os.path.join(metadata_dir, '_build_msvc_compiler'), python="2.7") except: raise finally: del os.environ['CONDATEST_MSVC_VER'] del os.environ['CL_EXE_VERSION'] @pytest.mark.parametrize("platform", platforms) @pytest.mark.parametrize("target_compiler", compilers) def test_cmake_generator(platform, target_compiler, testing_workdir, testing_config): testing_config.variant['python'] = target_compiler api.build(os.path.join(metadata_dir, '_cmake_generator'), 
config=testing_config) @pytest.mark.skipif(sys.platform == "win32", reason="No windows symlinks") def test_symlink_fail(testing_workdir, testing_config, capfd): with pytest.raises((SystemExit, FileNotFoundError)): api.build(os.path.join(fail_dir, "symlinks"), config=testing_config) # output, error = capfd.readouterr() # assert error.count("Error") == 6, "did not find appropriate count of Error in: " + error def test_pip_in_meta_yaml_fail(testing_workdir, testing_config): with pytest.raises(ValueError) as exc: api.build(os.path.join(fail_dir, "pip_reqs_fail_informatively"), config=testing_config) assert "environment.yml" in str(exc) def test_recursive_fail(testing_workdir, testing_config): with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)) as exc: api.build(os.path.join(fail_dir, "recursive-build"), config=testing_config) # indentation critical here. If you indent this, and the exception is not raised, then # the exc variable here isn't really completely created and shows really strange errors: assert "recursive-build2" in str(exc.value) def test_jinja_typo(testing_workdir, testing_config): with pytest.raises(SystemExit) as exc: api.build(os.path.join(fail_dir, "source_git_jinja2_oops"), config=testing_config) assert "GIT_DSECRIBE_TAG" in exc.exconly() @pytest.mark.serial def test_skip_existing(testing_workdir, testing_config, capfd): api.build(empty_sections, config=testing_config) api.build(empty_sections, config=testing_config, skip_existing=True) output, error = capfd.readouterr() assert "are already built" in output @pytest.mark.serial def test_skip_existing_url(testing_metadata, testing_workdir, capfd): outputs = api.build(testing_metadata) output_dir = os.path.join(testing_workdir, 'someoutput') platform = os.path.join(output_dir, testing_metadata.config.host_subdir) os.makedirs(platform) copy_into(outputs[0], os.path.join(platform, os.path.basename(outputs[0]))) api.update_index(platform, config=testing_metadata.config) noarch = os.path.join(output_dir, 'noarch') os.makedirs(noarch) api.update_index(noarch, config=testing_metadata.config) testing_metadata.config.skip_existing = True testing_metadata.config.channel_urls = [url_path(output_dir)] api.build(testing_metadata) output, error = capfd.readouterr() assert "are already built" in output assert url_path(testing_metadata.config.croot) in output def test_failed_tests_exit_build(testing_workdir, testing_config): with pytest.raises(SystemExit) as exc: api.build(os.path.join(metadata_dir, "_test_failed_test_exits"), config=testing_config) assert 'TESTS FAILED' in str(exc) def test_requirements_txt_for_run_reqs(testing_workdir, testing_config): testing_config.channel_urls = ('conda_build_test', ) api.build(os.path.join(metadata_dir, "_requirements_txt_run_reqs"), config=testing_config) @pytest.mark.serial def test_compileall_compiles_all_good_files(testing_workdir, testing_config): output = api.build(os.path.join(metadata_dir, "_compile-test"), config=testing_config)[0] good_files = ['f1.py', 'f3.py'] bad_file = 'f2_bad.py' for f in good_files: assert package_has_file(output, f) assert package_has_file(output, add_mangling(f)) assert package_has_file(output, bad_file) assert not package_has_file(output, add_mangling(bad_file)) def test_render_setup_py_old_funcname(testing_workdir, testing_config, caplog): api.build(os.path.join(metadata_dir, "_source_setuptools"), config=testing_config) assert "Deprecation notice: the load_setuptools function has been renamed to " in caplog.text @pytest.mark.skipif(not on_win, 
reason="only Windows is insane enough to have backslashes in paths") def test_backslash_in_always_include_files_path(testing_config): api.build(os.path.join(metadata_dir, '_backslash_in_include_files')) with pytest.raises(RuntimeError): api.build(os.path.join(fail_dir, 'backslash_in_include_files')) def test_build_metadata_object(testing_metadata): api.build(testing_metadata) @pytest.mark.skipif(on_win, reason="fortran compilers on win are hard.") def test_numpy_setup_py_data(testing_config): recipe_path = os.path.join(metadata_dir, '_numpy_setup_py_data') m = api.render(recipe_path, config=testing_config, numpy="1.11")[0][0] _hash = m.hash_dependencies() assert os.path.basename(api.get_output_file_path(m)[0]) == \ "load_setup_py_test-1.0a1-np111py{0}{1}{2}_1.tar.bz2".format( sys.version_info.major, sys.version_info.minor, _hash) def test_relative_git_url_submodule_clone(testing_workdir, testing_config, monkeypatch): toplevel = os.path.join(testing_workdir, 'toplevel') os.mkdir(toplevel) relative_sub = os.path.join(testing_workdir, 'relative_sub') os.mkdir(relative_sub) absolute_sub = os.path.join(testing_workdir, 'absolute_sub') os.mkdir(absolute_sub) sys_git_env = os.environ.copy() sys_git_env['GIT_AUTHOR_NAME'] = 'conda-build' sys_git_env['GIT_AUTHOR_EMAIL'] = '[email protected]' sys_git_env['GIT_COMMITTER_NAME'] = 'conda-build' sys_git_env['GIT_COMMITTER_EMAIL'] = '[email protected]' git = find_executable('git') exename = dummy_executable(testing_workdir, 'git') monkeypatch.setenv("PATH", testing_workdir, prepend=os.pathsep) FNULL = open(os.devnull, 'w') with pytest.raises(subprocess.CalledProcessError, message="Dummy git was not executed"): check_call_env([exename, '--version'], stdout=FNULL, stderr=FNULL) FNULL.close() for tag in range(2): os.chdir(absolute_sub) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('absolute', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'absolute'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'absolute{}'.format(tag)], env=sys_git_env) os.chdir(relative_sub) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('relative', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'relative'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'relative{}'.format(tag)], env=sys_git_env) os.chdir(toplevel) if tag == 0: check_call_env([git, 'init'], env=sys_git_env) with open('toplevel', 'w') as f: f.write(str(tag)) check_call_env([git, 'add', 'toplevel'], env=sys_git_env) check_call_env([git, 'commit', '-m', 'toplevel{}'.format(tag)], env=sys_git_env) if tag == 0: check_call_env([git, 'submodule', 'add', convert_path_for_cygwin_or_msys2(git, absolute_sub), 'absolute'], env=sys_git_env) check_call_env([git, 'submodule', 'add', '../relative_sub', 'relative'], env=sys_git_env) else: check_call_env([git, 'submodule', 'foreach', git, 'pull'], env=sys_git_env) check_call_env([git, 'commit', '-am', 'added submodules@{}'.format(tag)], env=sys_git_env) check_call_env([git, 'tag', '-a', str(tag), '-m', 'tag {}'.format(tag)], env=sys_git_env) requirements = ('requirements', OrderedDict([ ('build', ['git # [False]', 'm2-git # [win]', 'm2-filesystem # [win]'])])) recipe_dir = os.path.join(testing_workdir, 'recipe') if not os.path.exists(recipe_dir): os.makedirs(recipe_dir) filename = os.path.join(testing_workdir, 'recipe', 'meta.yaml') data = OrderedDict([ ('package', OrderedDict([ ('name', 'relative_submodules'), ('version', '{{ GIT_DESCRIBE_TAG }}')])), ('source', OrderedDict([ ('git_url', toplevel), 
('git_tag', str(tag))])), requirements, ('build', OrderedDict([ ('script', ['git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%%s > ' '%PREFIX%\\summaries.txt # [win]', 'git --no-pager submodule --quiet foreach git log -n 1 --pretty=format:%s > ' '$PREFIX/summaries.txt # [not win]']) ])), ('test', OrderedDict([ ('commands', ['echo absolute{}relative{} > %PREFIX%\\expected_summaries.txt # [win]' .format(tag, tag), 'fc.exe /W %PREFIX%\\expected_summaries.txt %PREFIX%\\summaries.txt # [win]', 'echo absolute{}relative{} > $PREFIX/expected_summaries.txt # [not win]' .format(tag, tag), 'diff -wuN ${PREFIX}/expected_summaries.txt ${PREFIX}/summaries.txt # [not win]']) ])) ]) with open(filename, 'w') as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) monkeypatch.undo() metadata = api.render(testing_workdir, config=testing_config)[0][0] output = api.get_output_file_path(metadata, config=testing_config)[0] assert ("relative_submodules-{}-".format(tag) in output) api.build(metadata, config=testing_config) def test_noarch(testing_workdir): filename = os.path.join(testing_workdir, 'meta.yaml') for noarch in (False, True): data = OrderedDict([ ('package', OrderedDict([ ('name', 'test'), ('version', '0.0.0')])), ('build', OrderedDict([ ('noarch', str(noarch))])) ]) with open(filename, 'w') as outfile: outfile.write(yaml.dump(data, default_flow_style=False, width=999999999)) output = api.get_output_file_path(testing_workdir)[0] assert (os.path.sep + "noarch" + os.path.sep in output or not noarch) assert (os.path.sep + "noarch" + os.path.sep not in output or noarch) def test_disable_pip(testing_config, testing_metadata): testing_metadata.config.disable_pip = True testing_metadata.meta['build']['script'] = 'python -c "import pip; print(pip.__version__)"' with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) testing_metadata.meta['build']['script'] = ('python -c "import setuptools; ' 'print(setuptools.__version__)"') with pytest.raises(subprocess.CalledProcessError): api.build(testing_metadata) @pytest.mark.skipif(not sys.platform.startswith('linux'), reason="rpath fixup only done on Linux so far.") def test_rpath_linux(testing_config): api.build(os.path.join(metadata_dir, "_rpath"), config=testing_config) def test_noarch_none_value(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, "_noarch_none") with pytest.raises(exceptions.CondaBuildException): api.build(recipe, config=testing_config) def test_noarch_foo_value(testing_config): outputs = api.build(os.path.join(metadata_dir, "noarch_generic"), config=testing_config) metadata = json.loads(package_has_file(outputs[0], 'info/index.json').decode()) assert metadata['noarch'] == "generic" def test_about_json_content(testing_metadata): outputs = api.build(testing_metadata) about = json.loads(package_has_file(outputs[0], 'info/about.json').decode()) assert 'conda_version' in about and about['conda_version'] == conda.__version__ assert 'conda_build_version' in about and about['conda_build_version'] == __version__ assert 'channels' in about and about['channels'] try: assert 'env_vars' in about and about['env_vars'] except AssertionError: if VersionOrder(conda.__version__) >= VersionOrder('4.2.10'): raise else: pass assert 'root_pkgs' in about and about['root_pkgs'] @pytest.mark.xfail(parse_version(conda.__version__) < parse_version("4.3.14"), reason="new noarch supported starting with conda 4.3.14") def test_noarch_python_with_tests(testing_config): recipe = 
os.path.join(metadata_dir, "_noarch_python_with_tests") api.build(recipe, config=testing_config) def test_noarch_python_1(testing_config): output = api.build(os.path.join(metadata_dir, "_noarch_python"), config=testing_config)[0] assert package_has_file(output, 'info/files') is not '' extra = json.loads(package_has_file(output, 'info/link.json').decode()) assert 'noarch' in extra assert 'entry_points' in extra['noarch'] assert 'type' in extra['noarch'] assert 'package_metadata_version' in extra def test_legacy_noarch_python(testing_config): output = api.build(os.path.join(metadata_dir, "_legacy_noarch_python"), config=testing_config)[0] assert os.path.basename(os.path.dirname(output)) == 'noarch' @pytest.mark.skipif(parse_version(conda.__version__) < parse_version("4.5"), reason="full preferred env implementation deferred to conda 4.5") def test_preferred_env(testing_config): recipe = os.path.join(metadata_dir, "_preferred_env") output = api.build(recipe, config=testing_config)[0] extra = json.loads(package_has_file(output, 'info/link.json').decode()) assert 'preferred_env' in extra assert 'name' in extra['preferred_env'] assert 'executable_paths' in extra['preferred_env'] exe_paths = extra['preferred_env']['executable_paths'] if on_win: assert exe_paths == ['Scripts/exepath1.bat', 'Scripts/exepath2.bat'] else: assert exe_paths == ['bin/exepath1', 'bin/exepath2'] assert 'package_metadata_version' in extra @pytest.mark.serial def test_skip_compile_pyc(testing_config): outputs = api.build(os.path.join(metadata_dir, "skip_compile_pyc"), config=testing_config) tf = tarfile.open(outputs[0]) pyc_count = 0 for f in tf.getmembers(): filename = os.path.basename(f.name) _, ext = os.path.splitext(filename) basename = filename.split('.', 1)[0] if basename == 'skip_compile_pyc': assert not ext == '.pyc', "a skip_compile_pyc .pyc was compiled: {}".format(filename) if ext == '.pyc': assert basename == 'compile_pyc', "an unexpected .pyc was compiled: {}".format(filename) pyc_count = pyc_count + 1 assert pyc_count == 2, "there should be 2 .pyc files, instead there were {}".format(pyc_count) def test_detect_binary_files_with_prefix(testing_config): outputs = api.build(os.path.join(metadata_dir, "_detect_binary_files_with_prefix"), config=testing_config) matches = [] with tarfile.open(outputs[0]) as tf: has_prefix = tf.extractfile('info/has_prefix') contents = [p.strip().decode('utf-8') for p in has_prefix.readlines()] has_prefix.close() matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or entry.endswith('"binary-has-prefix"')] assert len(matches) == 1, "binary-has-prefix not recorded in info/has_prefix" assert ' binary ' in matches[0], "binary-has-prefix not recorded as binary in info/has_prefix" def test_skip_detect_binary_files_with_prefix(testing_config): recipe = os.path.join(metadata_dir, "_skip_detect_binary_files_with_prefix") outputs = api.build(recipe, config=testing_config) matches = [] with tarfile.open(outputs[0]) as tf: try: has_prefix = tf.extractfile('info/has_prefix') contents = [p.strip().decode('utf-8') for p in has_prefix.readlines()] has_prefix.close() matches = [entry for entry in contents if entry.endswith('binary-has-prefix') or entry.endswith('"binary-has-prefix"')] except: pass assert len(matches) == 0, "binary-has-prefix recorded in info/has_prefix despite:" \ "build/detect_binary_files_with_prefix: false" def test_fix_permissions(testing_config): recipe = os.path.join(metadata_dir, "fix_permissions") outputs = api.build(recipe, config=testing_config) 
with tarfile.open(outputs[0]) as tf: for f in tf.getmembers(): assert f.mode & 0o444 == 0o444, "tar member '{}' has invalid (read) mode".format(f.name) @pytest.mark.skipif(not on_win, reason="windows-only functionality") @pytest.mark.parametrize('recipe_name', ["_script_win_creates_exe", "_script_win_creates_exe_garbled"]) def test_script_win_creates_exe(testing_config, recipe_name): recipe = os.path.join(metadata_dir, recipe_name) outputs = api.build(recipe, config=testing_config) assert package_has_file(outputs[0], 'Scripts/test-script.exe') assert package_has_file(outputs[0], 'Scripts/test-script-script.py') def test_output_folder_moves_file(testing_metadata, testing_workdir): testing_metadata.config.output_folder = testing_workdir outputs = api.build(testing_metadata, no_test=True) assert outputs[0].startswith(testing_workdir) def test_info_files_json(testing_config): outputs = api.build(os.path.join(metadata_dir, "ignore_some_prefix_files"), config=testing_config) assert package_has_file(outputs[0], "info/paths.json") with tarfile.open(outputs[0]) as tf: data = json.loads(tf.extractfile('info/paths.json').read().decode('utf-8')) fields = ["_path", "sha256", "size_in_bytes", "path_type", "file_mode", "no_link", "prefix_placeholder", "inode_paths"] for key in data.keys(): assert key in ['paths', 'paths_version'] for paths in data.get('paths'): for field in paths.keys(): assert field in fields assert len(data.get('paths')) == 2 for file in data.get('paths'): for key in file.keys(): assert key in fields short_path = file.get("_path") if short_path == "test.sh" or short_path == "test.bat": assert file.get("prefix_placeholder") is not None assert file.get("file_mode") is not None else: assert file.get("prefix_placeholder") is None assert file.get("file_mode") is None def test_build_expands_wildcards(mocker, testing_workdir): build_tree = mocker.patch("conda_build.build.build_tree") config = api.Config() files = ['abc', 'acb'] for f in files: os.makedirs(f) with open(os.path.join(f, 'meta.yaml'), 'w') as fh: fh.write('\n') api.build(["a*"], config=config) output = [os.path.join(os.getcwd(), path, 'meta.yaml') for path in files] build_tree.assert_called_once_with(output, build_only=False, config=mocker.ANY, need_source_download=True, notest=False, post=None, variants=None) @pytest.mark.serial @pytest.mark.parametrize('set_build_id', [True, False]) def test_remove_workdir_default(testing_config, caplog, set_build_id): recipe = os.path.join(metadata_dir, '_keep_work_dir') # another metadata = api.render(recipe, config=testing_config)[0][0] api.build(metadata, set_build_id=set_build_id) assert not glob(os.path.join(metadata.config.work_dir, '*')) @pytest.mark.serial def test_keep_workdir_and_dirty_reuse(testing_config, capfd): recipe = os.path.join(metadata_dir, '_keep_work_dir') # make a metadata object - otherwise the build folder is computed within the build, but does # not alter the config object that is passed in. 
This is by design - we always make copies # of the config object rather than edit it in place, so that variants don't clobber one metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] workdir = metadata.config.work_dir api.build(metadata) out, err = capfd.readouterr() assert glob(os.path.join(metadata.config.work_dir, '*')) metadata = api.render(recipe, config=testing_config, dirty=True, remove_work_dir=False)[0][0] assert workdir == metadata.config.work_dir metadata = api.render(recipe, config=testing_config)[0][0] assert workdir != metadata.config.work_dir testing_config.clean() def test_workdir_removal_warning(testing_config, caplog): recipe = os.path.join(metadata_dir, '_test_uses_src_dir') with pytest.raises(ValueError) as exc: api.build(recipe, config=testing_config) assert "work dir is removed" in str(exc) # @pytest.mark.serial # @pytest.mark.skipif(not sys.platform.startswith('linux'), # reason="cross compiler packages created only on Linux right now") # @pytest.mark.xfail(VersionOrder(conda.__version__) < VersionOrder('4.3.2'), # reason="not completely implemented yet") # def test_cross_compiler(testing_workdir, testing_config, capfd): # # TODO: testing purposes. Package from @mingwandroid's channel, copied to conda_build_test @pytest.mark.skipif(sys.platform != 'darwin', reason="relevant to mac only") def test_append_python_app_osx(testing_config): recipe = os.path.join(metadata_dir, '_osx_is_app_missing_python_app') api.build(recipe, config=testing_config) @pytest.mark.serial def test_run_exports(testing_metadata, testing_config, testing_workdir): api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config) api.build(os.path.join(metadata_dir, '_run_exports_implicit_weak'), config=testing_config) testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] assert 'weak_pinned_package 1.0.*' in m.meta['requirements']['run'] testing_metadata.meta['requirements']['build'] = ['test_has_run_exports_implicit_weak'] testing_metadata.meta['requirements']['host'] = ['python'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert 'weak_pinned_package 2.0.*' not in m.meta['requirements']['run'] testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] testing_metadata.meta['requirements']['host'] = ['test_has_run_exports_implicit_weak'] api.output_yaml(testing_metadata, 'meta.yaml') m = api.render(testing_workdir, config=testing_config)[0][0] assert any('strong_pinned_package 1.0' in req for req in m.meta['requirements']['host']) assert 'strong_pinned_package 1.0.*' in m.meta['requirements']['run'] assert 'weak_pinned_package 1.0.*' not in m.meta['requirements']['run'] assert 'weak_pinned_package 2.0.*' in m.meta['requirements']['run'] @pytest.mark.serial def test_ignore_run_exports(testing_metadata, testing_config): # above for our package here. 
api.build(os.path.join(metadata_dir, '_run_exports'), config=testing_config) testing_metadata.meta['requirements']['build'] = ['test_has_run_exports'] testing_metadata.meta['build']['ignore_run_exports'] = ['downstream_pinned_package'] testing_metadata.config.index = None m = finalize_metadata(testing_metadata) assert 'downstream_pinned_package 1.0' not in m.meta['requirements']['run'] def test_pin_subpackage_exact(testing_config): recipe = os.path.join(metadata_dir, '_pin_subpackage_exact') ms = api.render(recipe, config=testing_config) assert any(re.match(r'run_exports_subpkg 1.0 h[a-f0-9]{%s}_0' % testing_config.hash_length, req) for (m, _, _) in ms for req in m.meta['requirements']['run']) api.build(recipe, config=testing_config) @pytest.mark.skipif(sys.platform != 'linux', reason="xattr code written here is specific to linux") def test_copy_read_only_file_with_xattr(testing_config, testing_workdir): src_recipe = os.path.join(metadata_dir, '_xattr_copy') recipe = os.path.join(testing_workdir, '_xattr_copy') copy_into(src_recipe, recipe) # file is r/w for owner, but we change it to 400 after setting the attribute ro_file = os.path.join(recipe, 'mode_400_file') subprocess.check_call('setfattr -n user.attrib -v somevalue {}'.format(ro_file), shell=True) subprocess.check_call('chmod 400 {}'.format(ro_file), shell=True) api.build(recipe, config=testing_config) @pytest.mark.serial def test_env_creation_fail_exits_build(testing_config): recipe = os.path.join(metadata_dir, '_post_link_exits_after_retry') with pytest.raises((RuntimeError, LinkError, CondaError)): api.build(recipe, config=testing_config) recipe = os.path.join(metadata_dir, '_post_link_exits_tests') with pytest.raises((RuntimeError, LinkError, CondaError)): api.build(recipe, config=testing_config) @pytest.mark.serial def test_recursion_packages(testing_config): recipe = os.path.join(metadata_dir, '_recursive-build-two-packages') api.build(recipe, config=testing_config) @pytest.mark.serial def test_recursion_layers(testing_config): recipe = os.path.join(metadata_dir, '_recursive-build-two-layers') api.build(recipe, config=testing_config) @pytest.mark.skipif(sys.platform != 'win32', reason=("spaces break openssl prefix " "replacement on *nix")) def test_croot_with_spaces(testing_metadata, testing_workdir): testing_metadata.config.croot = os.path.join(testing_workdir, "space path") api.build(testing_metadata) def test_unknown_selectors(testing_config): recipe = os.path.join(metadata_dir, 'unknown_selector') api.build(recipe, config=testing_config) def test_extract_tarball_with_unicode_filename(testing_config): recipe = os.path.join(metadata_dir, '_unicode_in_tarball') api.build(recipe, config=testing_config) @pytest.mark.serial def test_failed_recipe_leaves_folders(testing_config, testing_workdir): recipe = os.path.join(fail_dir, 'recursive-build') m = api.render(recipe, config=testing_config)[0][0] locks = get_conda_operation_locks(m.config) with pytest.raises((RuntimeError, exceptions.DependencyNeedsBuildingError)): api.build(m) assert os.path.isdir(m.config.build_folder), 'build folder was removed' assert os.listdir(m.config.build_folder), 'build folder has no files' # make sure that it does not leave lock files, though, as these cause permission errors on # centralized installations any_locks = False locks_list = set() for lock in locks: if os.path.isfile(lock.lock_file): any_locks = True dest_path = base64.b64decode(os.path.basename(lock.lock_file)) if PY3 and hasattr(dest_path, 'decode'): dest_path = dest_path.decode() 
locks_list.add((lock.lock_file, dest_path)) assert not any_locks, "remaining locks:\n{}".format('\n'.join('->'.join((l, r)) for (l, r) in locks_list)) def test_only_r_env_vars_defined(testing_config): recipe = os.path.join(metadata_dir, '_r_env_defined') testing_config.channel_urls = ('r', ) api.build(recipe, config=testing_config) def test_only_perl_env_vars_defined(testing_config): recipe = os.path.join(metadata_dir, '_perl_env_defined') testing_config.channel_urls = ('c3i_test', ) api.build(recipe, config=testing_config) @pytest.mark.skipif(on_win, reason='no lua package on win') def test_only_lua_env(testing_config): recipe = os.path.join(metadata_dir, '_lua_env_defined') testing_config.channel_urls = ('conda-forge', ) testing_config.prefix_length = 80 testing_config.set_build_id = False api.build(recipe, config=testing_config) def test_run_constrained_stores_constrains_info(testing_config): recipe = os.path.join(metadata_dir, '_run_constrained') out_file = api.build(recipe, config=testing_config)[0] info_contents = json.loads(package_has_file(out_file, 'info/index.json')) assert 'constrains' in info_contents assert len(info_contents['constrains']) == 1 assert info_contents['constrains'][0] == 'bzip2 1.*' @pytest.mark.serial def test_no_locking(testing_config): recipe = os.path.join(metadata_dir, 'source_git_jinja2') api.update_index(os.path.join(testing_config.croot, testing_config.subdir), config=testing_config) api.build(recipe, config=testing_config, locking=False) def test_test_dependencies(testing_workdir, testing_config): recipe = os.path.join(fail_dir, 'check_test_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) assert 'Unsatisfiable dependencies for platform ' in str(e.value) assert 'pytest-package-does-not-exist' in str(e.value) def test_runtime_dependencies(testing_workdir, testing_config): recipe = os.path.join(fail_dir, 'check_runtime_dependencies') with pytest.raises(exceptions.DependencyNeedsBuildingError) as e: api.build(recipe, config=testing_config) assert 'Unsatisfiable dependencies for platform ' in str(e.value) assert 'some-nonexistent-package1' in str(e.value) def test_no_force_upload_condarc_setting(mocker, testing_workdir, testing_metadata): testing_metadata.config.anaconda_upload = True del testing_metadata.meta['test'] api.output_yaml(testing_metadata, 'meta.yaml') call = mocker.patch.object(conda_build.build.subprocess, 'call') cc_conda_build['force_upload'] = False pkg = api.build(testing_workdir) assert call.called_once_with(['anaconda', 'upload', pkg]) del cc_conda_build['force_upload'] pkg = api.build(testing_workdir) assert call.called_once_with(['anaconda', 'upload', '--force', pkg]) def test_setup_py_data_in_env(testing_config): recipe = os.path.join(metadata_dir, '_setup_py_data_in_env') # should pass with any modern python (just not 3.5) api.build(recipe, config=testing_config) # make sure it fails with our special python logic with pytest.raises(subprocess.CalledProcessError): api.build(recipe, config=testing_config, python='3.4') def test_numpy_xx(testing_config): recipe = os.path.join(metadata_dir, '_numpy_xx') api.build(recipe, config=testing_config, numpy='1.12') def test_numpy_xx_host(testing_config): recipe = os.path.join(metadata_dir, '_numpy_xx_host') api.build(recipe, config=testing_config, numpy='1.12') def test_python_xx(testing_config): recipe = os.path.join(metadata_dir, '_python_xx') api.build(recipe, config=testing_config, python='3.4') def 
test_indirect_numpy_dependency(testing_metadata): testing_metadata.meta['requirements']['build'] = ['arrow-cpp 0.5.*'] testing_metadata.config.channel_urls = ['conda-forge'] api.build(testing_metadata, numpy=1.13) def test_dependencies_with_notest(testing_workdir, testing_config): recipe = os.path.join(metadata_dir, '_test_dependencies') api.build(recipe, config=testing_config, notest=True) with pytest.raises(DependencyNeedsBuildingError) as excinfo: api.build(recipe, config=testing_config, notest=False) assert 'Unsatisfiable dependencies for platform' in str(excinfo.value) assert 'somenonexistentpackage1' in str(excinfo.value) def test_source_cache_build(testing_workdir): recipe = os.path.join(metadata_dir, 'source_git_jinja2') config = api.Config(src_cache_root=testing_workdir) api.build(recipe, notest=True, config=config) git_cache_directory = '{}/git_cache' .format(testing_workdir) assert os.path.isdir(git_cache_directory) files = [filename for _, _, filenames in os.walk(git_cache_directory) for filename in filenames] assert len(files) > 0 def test_copy_test_source_files(testing_config): recipe = os.path.join(metadata_dir, '_test_test_source_files') filenames = set() for copy in (False, True): testing_config.copy_test_source_files = copy outputs = api.build(recipe, notest=False, config=testing_config) filenames.add(os.path.basename(outputs[0])) tf = tarfile.open(outputs[0]) found = False for f in tf.getmembers(): if f.name.startswith('info/test/'): found = True break if found: assert copy, "'info/test/' found in tar.bz2 but not copying test source files" else: assert not copy, "'info/test/' not found in tar.bz2 but copying test source files" assert len(filenames) == 2, "copy_test_source_files does not modify the build hash but should" def test_pin_depends(testing_config): recipe = os.path.join(metadata_dir, '_pin_depends_record') m = api.render(recipe, config=testing_config)[0][0] # the recipe python is not pinned, and having pin_depends set to record will not show it in record assert not any(re.search('python\s+[23]\.', dep) for dep in m.meta['requirements']['run']) output = api.build(m, config=testing_config)[0] requires = package_has_file(output, 'info/requires') assert requires if PY3 and hasattr(requires, 'decode'): requires = requires.decode() assert re.search('python\=[23]\.', requires), "didn't find pinned python in info/requires" def test_failed_patch_exits_build(testing_config): with pytest.raises(RuntimeError): api.build(os.path.join(metadata_dir, '_bad_patch'), config=testing_config) def test_version_mismatch_in_variant_does_not_infinitely_rebuild_folder(testing_config): testing_config.variant['test_a'] = "1.0" recipe = os.path.join(metadata_dir, '_build_deps_no_infinite_loop', 'test_b') with pytest.raises(DependencyNeedsBuildingError): api.build(recipe, config=testing_config) testing_config.variant['test_a'] = "2.0" api.build(recipe, config=testing_config)
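The conda-build tests in the record above repeat a single pattern: render a recipe into metadata, build it, then open the resulting package and inspect its info/ files. The sketch below is not part of the record; it is a minimal, hedged illustration of that pattern using only conda_build.api and the standard library, and it assumes a local recipe directory named ./my_recipe (a placeholder) exists.

# Minimal sketch of the render -> build -> inspect pattern used by the tests above.
# "./my_recipe" is a placeholder path, not a recipe shipped with conda-build.
import json
import tarfile

from conda_build import api

config = api.Config()
metadata = api.render("./my_recipe", config=config)[0][0]  # render() yields (MetaData, ...) tuples
outputs = api.build(metadata, config=config)               # list of built package paths

with tarfile.open(outputs[0]) as tf:
    index = json.loads(tf.extractfile("info/index.json").read().decode("utf-8"))
print(index["name"], index["version"])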
true
true
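test_build_expands_wildcards and test_no_force_upload_condarc_setting in the record above drive pytest-mock; note that the form assert call.called_once_with(...) used there is always truthy (it merely creates a child mock), whereas Mock's assert_called_once_with raises on a mismatch. Below is a self-contained sketch of the intended check, with upload_package as a hypothetical stand-in for the real upload step.

# Requires the pytest-mock plugin, which provides the `mocker` fixture used above.
import subprocess

def upload_package(pkg):
    # Hypothetical stand-in for conda-build's upload step, for illustration only.
    subprocess.call(["anaconda", "upload", pkg])

def test_upload_is_invoked(mocker):
    fake_call = mocker.patch("subprocess.call")
    upload_package("pkg.tar.bz2")
    # assert_called_once_with raises if the call is missing or its args differ;
    # a bare `fake_call.called_once_with(...)` would silently pass.
    fake_call.assert_called_once_with(["anaconda", "upload", "pkg.tar.bz2"])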
f7013f86b94d5e56f39839b5756b3e7af44ab29e
1,213
py
Python
backend/app/tests/tests.py
Edinburgh-Genome-Foundry/dab
7eabf76adf3a0b9332c3651b5d0e5e6d98237d2b
[ "MIT" ]
7
2019-04-11T20:36:07.000Z
2020-03-24T07:12:13.000Z
backend/app/tests/tests.py
Edinburgh-Genome-Foundry/dab
7eabf76adf3a0b9332c3651b5d0e5e6d98237d2b
[ "MIT" ]
null
null
null
backend/app/tests/tests.py
Edinburgh-Genome-Foundry/dab
7eabf76adf3a0b9332c3651b5d0e5e6d98237d2b
[ "MIT" ]
null
null
null
"""Tests for the backend""" from .tools import logprint, AppTestCase, load_file_to_dict, load_json class GetConstructsAsGenbankTests(AppTestCase): endpoint = 'get_constructs_as_genbanks' defaults = dict( database_token='', constructsData={} ) def test_emma_2_constructs_with_one_combinatorial(self): json = load_json('emma_2_constructs_with_one_combinatorial.json') response = self.run_job(json_request=json) self.assertTrue('zip_file' in response) class GetConstructsAsPDFTests(AppTestCase): endpoint = 'get_constructs_as_pdf' defaults = dict(constructsData={}) def test_emma_no_annotation_to_pdf(self): json = load_json('emma_no_annotation_to_pdf.json') response = self.run_job(json_request=json) self.assertTrue('pdf_file' in response) class SendOrderToEGFTests(AppTestCase): endpoint = 'send_order_to_egf' defaults = dict(constructsData={}, customer={}) def test_send_order_to_egf(self): json = load_json('emma_send_order_to_egf.json') response = self.run_job(json_request=json) assert 'message' in response self.assertTrue('order was sent' in response['message'])
33.694444
73
0.720528
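The three numbers just above (33.694444, 73 and 0.720528) are per-file statistics for this record's 1,213-byte test module; dividing the size by the average line length gives roughly 36 source lines. How the dataset computes these fields is not documented here, so the following is only a plausible, hedged reconstruction.

# Hypothetical reconstruction of the per-record statistics; the exact definitions
# used to build this dataset (e.g. newline handling) are assumptions.
def file_stats(content: str) -> dict:
    lines = content.splitlines() or [""]
    return {
        "size": len(content),
        "avg_line_length": len(content) / len(lines),
        "max_line_length": max(len(line) for line in lines),
        "alphanum_fraction": sum(ch.isalnum() for ch in content) / max(len(content), 1),
    }

# e.g. file_stats(open("backend/app/tests/tests.py").read()) should come out close to
# {'size': 1213, 'avg_line_length': 33.69, 'max_line_length': 73, 'alphanum_fraction': 0.72}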
from .tools import logprint, AppTestCase, load_file_to_dict, load_json class GetConstructsAsGenbankTests(AppTestCase): endpoint = 'get_constructs_as_genbanks' defaults = dict( database_token='', constructsData={} ) def test_emma_2_constructs_with_one_combinatorial(self): json = load_json('emma_2_constructs_with_one_combinatorial.json') response = self.run_job(json_request=json) self.assertTrue('zip_file' in response) class GetConstructsAsPDFTests(AppTestCase): endpoint = 'get_constructs_as_pdf' defaults = dict(constructsData={}) def test_emma_no_annotation_to_pdf(self): json = load_json('emma_no_annotation_to_pdf.json') response = self.run_job(json_request=json) self.assertTrue('pdf_file' in response) class SendOrderToEGFTests(AppTestCase): endpoint = 'send_order_to_egf' defaults = dict(constructsData={}, customer={}) def test_send_order_to_egf(self): json = load_json('emma_send_order_to_egf.json') response = self.run_job(json_request=json) assert 'message' in response self.assertTrue('order was sent' in response['message'])
true
true
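The tests in this record rely on helpers from a .tools module (AppTestCase, load_json, the run_job method) whose source is not part of the record. Purely to illustrate how such a harness could look for a Flask-style backend, and explicitly not the project's actual implementation, here is a hypothetical sketch.

# Hypothetical test harness; the real backend/app/tests/tools.py may differ substantially.
import json
import os
import unittest

from app import app  # assumed Flask application object; placeholder import


def load_json(filename):
    # Load a request fixture stored next to the tests.
    path = os.path.join(os.path.dirname(__file__), "data", filename)
    with open(path) as f:
        return json.load(f)


class AppTestCase(unittest.TestCase):
    endpoint = None   # overridden by subclasses, e.g. 'get_constructs_as_genbanks'
    defaults = {}

    def run_job(self, json_request):
        payload = dict(self.defaults, **json_request)
        client = app.test_client()
        response = client.post("/" + self.endpoint, json=payload)
        return response.get_json()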
f7013f89ddf7249cb8c21753c974a4e817c0eaa2
45,183
py
Python
archetypal/schedule.py
brunomarct/archetypal
ce8daf4e18ef3ec92967e5d6837b392199caf83b
[ "MIT" ]
null
null
null
archetypal/schedule.py
brunomarct/archetypal
ce8daf4e18ef3ec92967e5d6837b392199caf83b
[ "MIT" ]
null
null
null
archetypal/schedule.py
brunomarct/archetypal
ce8daf4e18ef3ec92967e5d6837b392199caf83b
[ "MIT" ]
null
null
null
################################################################################ # Module: schedule.py # Description: Functions for handling conversion of EnergyPlus schedule objects # License: MIT, see full license in LICENSE.txt # Web: https://github.com/samuelduchesne/archetypal ################################################################################ import functools import io import logging as lg from datetime import datetime, timedelta import archetypal import numpy as np import pandas as pd from archetypal import log class Schedule(object): """An object designed to handle any EnergyPlys schedule object""" def __init__(self, sch_name, idf=None, start_day_of_the_week=0, strict=False, base_year=2018, schType=None, **kwargs): """ Args: idf (IDF): IDF object sch_name (str): The schedule name in the idf file start_day_of_the_week (int): 0-based day of week (Monday=0) strict (bool): if True, schedules that have the Field-Sets such as Holidays and CustomDay will raise an error if they are absent from the IDF file. If False, any missing qualifiers will be ignored. base_year (int): The base year of the schedule. Defaults to 2018 since the first day of that year is a Monday. """ super(Schedule, self).__init__(**kwargs) self.strict = strict self.idf = idf self.schName = sch_name self.startDayOfTheWeek = self.get_sdow(start_day_of_the_week) self.year = base_year self.startDate = self.start_date() self.count = 0 self.startHOY = 1 self.endHOY = 24 self.unit = "unknown" self.index_ = None self.values = None self.schType = schType _type = kwargs.get('Type', None) if _type is None: self.schTypeLimitsName = self.get_schedule_type_limits_name( sch_type=self.schType) else: self.schTypeLimitsName = _type @classmethod def constant_schedule(cls, hourly_value=1, Name='AlwaysOn', **kwargs): idftxt = "VERSION, 8.9;" # Not an emplty string. 
has just the # version number # we can make a file handle of a string fhandle = io.StringIO(idftxt) # initialize the IDF object with the file handle idf_scratch = archetypal.IDF(fhandle) idf_scratch.add_object(ep_object='Schedule:Constant'.upper(), **dict(Name=Name, Schedule_Type_Limits_Name='', Hourly_Value=hourly_value), save=False) sched = Schedule(sch_name=Name, idf=idf_scratch, **kwargs) return sched @property def all_values(self): """returns the values array""" if self.values is None: self.values = self.get_schedule_values(sch_name=self.schName, sch_type=self.schType) return self.values else: return self.values @property def max(self): return max(self.all_values) @property def min(self): return min(self.all_values) @property def mean(self): return np.mean(self.all_values) @property def series(self): """Returns the schedule values as a pd.Series object with a DateTimeIndex""" index = pd.date_range(start=self.startDate, periods=len( self.all_values), freq='1H') return pd.Series(self.all_values, index=index) def get_schedule_type_limits_name(self, sch_name=None, sch_type=None): """Return the Schedule Type Limits name associated to a schedule name""" if sch_name is None: sch_name = self.schName if sch_type is None: schedule_values = self.idf.get_schedule_data_by_name(sch_name, sch_type=sch_type) try: schedule_limit_name = schedule_values.Schedule_Type_Limits_Name except: return 'unknown' else: return schedule_limit_name def get_schedule_type_limits_data(self, sch_name=None): """Returns Schedule Type Limits data from schedule name""" if sch_name is None: sch_name = self.schName schedule_values = self.idf.get_schedule_data_by_name(sch_name) try: schedule_limit_name = schedule_values.Schedule_Type_Limits_Name except: # this schedule is probably a 'Schedule:Week:Daily' which does # not have a Schedule_Type_Limits_Name field return '', '', '', '' else: lower_limit, upper_limit, numeric_type, unit_type = \ self.idf.get_schedule_type_limits_data_by_name( schedule_limit_name) self.unit = unit_type if self.unit == "unknown": self.unit = numeric_type return lower_limit, upper_limit, numeric_type, unit_type def get_schedule_type(self, sch_name=None): """Return the schedule type""" if sch_name is None: sch_name = self.schName schedule_values = self.idf.get_schedule_data_by_name(sch_name) sch_type = schedule_values.fieldvalues[0] return sch_type def start_date(self): """The start date of the schedule. 
Satisfies `startDayOfTheWeek`""" import calendar c = calendar.Calendar(firstweekday=self.startDayOfTheWeek) start_date = c.monthdatescalendar(self.year, 1)[0][0] return datetime(start_date.year, start_date.month, start_date.day) def plot(self, slice=None, **kwargs): hourlyvalues = self.all_values index = pd.date_range(self.startDate, periods=len( hourlyvalues), freq='1H') series = pd.Series(hourlyvalues, index=index, dtype=float) if slice is None: slice = pd.IndexSlice[:] elif len(slice) > 1: slice = pd.IndexSlice[slice[0]:slice[1]] ax = series.loc[slice].plot(**kwargs, label=self.schName) return ax def get_interval_day_ep_schedule_values(self, sch_name=None): """'Schedule:Day:Interval""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('Schedule:Day:Interval'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) number_of_day_sch = int((len(values.fieldvalues) - 3) / 2) hourly_values = np.arange(24) start_hour = 0 for i in range(number_of_day_sch): value = float(values['Value_Until_Time_{}'.format(i + 1)]) until_time = [int(s.strip()) for s in values['Time_{}'.format(i + 1)].split(":") if s.strip().isdigit()] end_hour = int(until_time[0] + until_time[1] / 60) for hour in range(start_hour, end_hour): hourly_values[hour] = value start_hour = end_hour if numeric_type.strip().lower() == "discrete": hourly_values = hourly_values.astype(int) return hourly_values def get_hourly_day_ep_schedule_values(self, sch_name=None): """'Schedule:Day:Hourly'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('Schedule:Day:Hourly'.upper(), sch_name) fieldvalues_ = np.array(values.fieldvalues[3:]) return fieldvalues_ def get_compact_weekly_ep_schedule_values(self, sch_name=None, start_date=None, index=None): """'schedule:week:compact'""" if start_date is None: start_date = self.startDate if index is None: idx = pd.date_range(start=start_date, periods=168, freq='1H') slicer_ = pd.Series([False] * (len(idx)), index=idx) else: slicer_ = pd.Series([False] * (len(index)), index=index) if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:week:compact'.upper(), sch_name) weekly_schedules = pd.Series([0] * len(slicer_), index=slicer_.index) # update last day of schedule if self.count == 0: self.schType = values.key self.endHOY = 168 num_of_daily_schedules = int(len(values.fieldvalues[2:]) / 2) for i in range(num_of_daily_schedules): day_type = values['DayType_List_{}'.format(i + 1)].lower() how = self.field_set(day_type, slicer_) if not weekly_schedules.loc[how].empty: # Loop through days and replace with day:schedule values days = [] for name, day in weekly_schedules.loc[how].groupby(pd.Grouper( freq='D')): if not day.empty: ref = values.get_referenced_object( "ScheduleDay_Name_{}".format(i + 1)) day.loc[:] = self.get_schedule_values( sch_name=ref.Name, sch_type=ref.key) days.append(day) new = pd.concat(days) slicer_.update( pd.Series([True] * len(new.index), index=new.index)) slicer_ = slicer_.apply(lambda x: x == True) weekly_schedules.update(new) else: return weekly_schedules.values return weekly_schedules.values def get_daily_weekly_ep_schedule_values(self, sch_name=None): """'schedule:week:daily'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:week:daily'.upper(), sch_name) # 7 list for 7 days of the week hourly_values = [] for day in ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']: ref = 
values.get_referenced_object( '{}_ScheduleDay_Name'.format(day)) h = self.get_schedule_values(sch_name=ref.Name, sch_type=ref.key) hourly_values.append(h) hourly_values = np.array(hourly_values) # shift days earlier by self.startDayOfTheWeek hourly_values = np.roll(hourly_values, -self.startDayOfTheWeek, axis=0) return hourly_values.ravel() def get_list_day_ep_schedule_values(self, sch_name=None): """'schedule:day:list'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:day:list'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) import pandas as pd freq = int(values['Minutes_per_Item']) # Frequency of the values num_values = values.fieldvalues[5:] # List of values method = values['Interpolate_to_Timestep'] # How to resample # fill a list of available values and pad with zeros (this is safer # but should not occur) all_values = np.arange(int(24 * 60 / freq)) for i in all_values: try: all_values[i] = num_values[i] except: all_values[i] = 0 # create a fake index to help us with the resampling index = pd.date_range(start=self.startDate, periods=(24 * 60) / freq, freq='{}T'.format(freq)) series = pd.Series(all_values, index=index) # resample series to hourly values and apply resampler function series = series.resample('1H').apply(_how(method)) return series.values def get_constant_ep_schedule_values(self, sch_name=None): """'schedule:constant'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:constant'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) hourly_values = np.arange(8760) value = float(values['Hourly_Value']) for hour in hourly_values: hourly_values[hour] = value if numeric_type.strip().lower() == 'discrete': hourly_values = hourly_values.astype(int) return hourly_values def get_file_ep_schedule_values(self, sch_name=None): """'schedule:file'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:file'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) filename = values['File_Name'] column = values['Column_Number'] rows = values['Rows_to_Skip_at_Top'] hours = values['Number_of_Hours_of_Data'] sep = values['Column_Separator'] interp = values['Interpolate_to_Timestep'] import pandas as pd import os idfdir = os.path.dirname(self.idf.idfname) file = os.path.join(idfdir, filename) delimeter = _separator(sep) skip_rows = int(rows) - 1 # We want to keep the column col = [int(column) - 1] # zero-based values = pd.read_csv(file, delimiter=delimeter, skiprows=skip_rows, usecols=col) return values.iloc[:, 0].values def get_compact_ep_schedule_values(self, sch_name=None): """'schedule:compact'""" if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:compact'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) field_sets = ['through', 'for', 'interpolate', 'until', 'value'] fields = values.fieldvalues[3:] index = pd.date_range(start=self.startDate, periods=8760, freq='H') zeros = np.zeros(len(index)) slicer_ = pd.Series([False] * len(index), index=index) series = pd.Series(zeros, index=index) from_day = self.startDate ep_from_day = datetime(self.year, 1, 1) from_time = '00:00' how_interpolate = None for field in fields: if any([spe in field.lower() for spe in field_sets]): f_set, hour, minute, value = 
self.field_interpreter(field) if f_set.lower() == 'through': # main condition. All sub-conditions must obey a # `Through` condition # First, initialize the slice (all False for now) through_conditions = self.invalidate_condition(series) # reset from_time from_time = '00:00' # Prepare ep_to_day variable ep_to_day = self.date_field_interpretation(value) + \ timedelta(days=1) # Calculate Timedelta in days days = (ep_to_day - ep_from_day).days # Add timedelta to start_date to_day = from_day + timedelta(days=days) + timedelta( hours=-1) # slice the conditions with the range and apply True through_conditions.loc[from_day:to_day] = True from_day = to_day + timedelta(hours=1) ep_from_day = ep_to_day elif f_set.lower() == 'for': # slice specific days # reset from_time from_time = '00:00' for_condition = self.invalidate_condition(series) values = value.split() if len(values) > 1: # if multiple `For`. eg.: For: Weekends Holidays, # Combine both conditions for value in values: if value.lower() == 'allotherdays': # Apply condition to slice how = self.field_set(value, slicer_) # Reset though condition through_conditions = how for_condition = how else: how = self.field_set(value, slicer_) for_condition.loc[how] = True elif value.lower() == 'allotherdays': # Apply condition to slice how = self.field_set(value, slicer_) # Reset though condition through_conditions = how for_condition = how else: # Apply condition to slice how = self.field_set(value) for_condition.loc[how] = True # Combine the for_condition with all_conditions all_conditions = through_conditions & for_condition # update in memory slice # self.sliced_day_.loc[all_conditions] = True elif 'interpolate' in f_set.lower(): # we need to upsample to series to 8760 * 60 values new_idx = pd.date_range(start=self.startDate, periods=525600, closed='left', freq='T') series = series.resample('T').pad() series = series.reindex(new_idx) series.fillna(method='pad', inplace=True) through_conditions = through_conditions.resample('T').pad() through_conditions = through_conditions.reindex(new_idx) through_conditions.fillna(method='pad', inplace=True) for_condition = for_condition.resample('T').pad() for_condition = for_condition.reindex(new_idx) for_condition.fillna(method='pad', inplace=True) how_interpolate = value.lower() elif f_set.lower() == 'until': until_condition = self.invalidate_condition(series) if series.index.freq.name == 'T': # until_time = str(int(hour) - 1) + ':' + minute until_time = timedelta(hours=int(hour), minutes=int(minute)) - timedelta( minutes=1) else: until_time = str(int(hour) - 1) + ':' + minute until_condition.loc[until_condition.between_time(from_time, str( until_time)).index] = True all_conditions = for_condition & through_conditions & \ until_condition from_time = str(int(hour)) + ':' + minute elif f_set.lower() == 'value': # If the therm `Value: ` field is used, we will catch it # here. 
# update in memory slice slicer_.loc[all_conditions] = True series[all_conditions] = value else: # Do something here before looping to the next Field pass else: # If the term `Value: ` is not used; the variable is simply # passed in the Field value = float(field) series[all_conditions] = value # update in memory slice slicer_.loc[all_conditions] = True if how_interpolate: return series.resample('H').mean().values else: return series.values def field_interpreter(self, field): """dealing with a Field-Set (Through, For, Interpolate, # Until, Value) and return the parsed string""" if 'through' in field.lower(): # deal with through if ':' in field.lower(): # parse colon f_set, statement = field.split(':') hour = None minute = None value = statement.strip() else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'for' in field.lower(): if ':' in field.lower(): # parse colon f_set, statement = field.split(':') value = statement.strip() hour = None minute = None else: # parse without a colon msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'interpolate' in field.lower(): msg = 'The schedule "{sch}" contains sub-hourly values (' \ 'Field-Set="{field}"). The average over the hour is ' \ 'taken'.format(sch=self.schName, field=field) log(msg, lg.WARNING) f_set, value = field.split(':') hour = None minute = None elif 'until' in field.lower(): if ':' in field.lower(): # parse colon try: f_set, hour, minute = field.split(':') hour = hour.strip() # remove trailing spaces minute = minute.strip() # remove trailing spaces value = None except: f_set = 'until' hour, minute = field.split(':') hour = hour[-2:].strip() minute = minute.strip() value = None else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'value' in field.lower(): if ':' in field.lower(): # parse colon f_set, statement = field.split(':') value = statement.strip() hour = None minute = None else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) else: # deal with the data value f_set = field hour = None minute = None value = field[len(field) + 1:].strip() return f_set, hour, minute, value @staticmethod def invalidate_condition(series): index = series.index periods = len(series) return pd.Series([False] * periods, index=index) def get_yearly_ep_schedule_values(self, sch_name=None): """'schedule:year'""" # first week start_date = self.startDate idx = pd.date_range(start=start_date, periods=8760, freq='1H') hourly_values = pd.Series([0] * 8760, index=idx) # update last day of schedule self.endHOY = 8760 if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:year'.upper(), sch_name) # generate weekly schedules num_of_weekly_schedules = int(len(values.fieldvalues[3:]) / 5) for i in range(num_of_weekly_schedules): ref = values.get_referenced_object( 'ScheduleWeek_Name_{}'.format(i + 1)) start_month = values['Start_Month_{}'.format(i + 1)] end_month = values['End_Month_{}'.format(i + 1)] start_day = values['Start_Day_{}'.format(i + 1)] end_day = values['End_Day_{}'.format(i + 1)] start = datetime.strptime( '{}/{}/{}'.format(self.year, start_month, start_day), '%Y/%m/%d') end = datetime.strptime( 
'{}/{}/{}'.format(self.year, end_month, end_day), '%Y/%m/%d') days = (end - start).days + 1 end_date = start_date + timedelta(days=days) + timedelta(hours=23) how = pd.IndexSlice[start_date:end_date] weeks = [] for name, week in hourly_values.loc[how].groupby( pd.Grouper(freq='168H')): if not week.empty: try: week.loc[:] = self.get_schedule_values( sch_name=ref.Name, start_date=week.index[0], index=week.index, sch_type=ref.key) except ValueError: week.loc[:] = self.get_schedule_values( ref.Name, week.index[0])[0:len(week)] finally: weeks.append(week) new = pd.concat(weeks) hourly_values.update(new) start_date += timedelta(days=days) return hourly_values.values def get_schedule_values(self, sch_name=None, start_date=None, index=None, sch_type=None): """Main function that returns the schedule values Args: sch_type: index: start_date: """ if sch_name is None: sch_name = self.schName if sch_type is None: schedule_values = self.idf.get_schedule_data_by_name(sch_name) self.schType = schedule_values.key.upper() sch_type = self.schType if self.count == 0: # This is the first time, get the schedule type and the type limits. self.schTypeLimitsName = self.get_schedule_type_limits_name() self.count += 1 if sch_type.upper() == "schedule:year".upper(): hourly_values = self.get_yearly_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:interval".upper(): hourly_values = self.get_interval_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:hourly".upper(): hourly_values = self.get_hourly_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:list".upper(): hourly_values = self.get_list_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:week:compact".upper(): hourly_values = self.get_compact_weekly_ep_schedule_values( sch_name, start_date, index) elif sch_type.upper() == "schedule:week:daily".upper(): hourly_values = self.get_daily_weekly_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:constant".upper(): hourly_values = self.get_constant_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:compact".upper(): hourly_values = self.get_compact_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:file".upper(): hourly_values = self.get_file_ep_schedule_values( sch_name) else: log('Archetypal does not support "{}" currently'.format( self.schType), lg.WARNING) hourly_values = [] return hourly_values def is_schedule(self, sch_name): """Returns True if idfobject is one of 'schedule_types'""" if sch_name.upper() in self.idf.schedules_dict: return True else: return False def to_year_week_day(self): """convert a Schedule Class to the 'Schedule:Year', 'Schedule:Week:Daily' and 'Schedule:Day:Hourly' representation Returns: 'Schedule:Year', list of ['Schedule:Week:Daily'], list of ['Schedule:Day:Hourly'] """ full_year = np.array(self.all_values) # array of shape (8760,) values = full_year.reshape(-1, 24) # shape (365, 24) # create unique days unique_days, nds = np.unique(values, axis=0, return_inverse=True) ep_days = [] dict_day = {} count_day = 0 for unique_day in unique_days: name = 'd_' + self.schName + '_' + '%03d' % count_day name, count_day = archetypal.check_unique_name('d', count_day, name, archetypal.settings.unique_schedules, suffix=True) dict_day[name] = unique_day archetypal.settings.unique_schedules.append(name) # Create idf_objects for schedule:day:hourly ep_day = self.idf.add_object( ep_object='Schedule:Day:Hourly'.upper(), save=False, **dict(Name=name, Schedule_Type_Limits_Name=self.schType, 
**{'Hour_{}'.format(i + 1): unique_day[i] for i in range(24)}) ) ep_days.append(ep_day) # create unique weeks from unique days unique_weeks, nwsi, nws, count = np.unique( full_year[:364 * 24, ...].reshape(-1, 168), return_index=True, axis=0, return_inverse=True, return_counts=True) # Appending unique weeks in dictionary with name and values of weeks as # keys # {'name_week': {'dayName':[]}} dict_week = {} count_week = 0 for unique_week in unique_weeks: week_id = 'w_' + self.schName + '_' + '%03d' % count_week week_id, count_week = archetypal.check_unique_name('w', count_week, week_id, archetypal.settings.unique_schedules, suffix=True) archetypal.settings.unique_schedules.append(week_id) dict_week[week_id] = {} for i in list(range(0, 7)): day_of_week = unique_week[..., i * 24:(i + 1) * 24] for key in dict_day: if (day_of_week == dict_day[key]).all(): dict_week[week_id]['day_{}'.format(i)] = key # Create idf_objects for schedule:week:daily list_day_of_week = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] ordered_day_n = np.array([6, 0, 1, 2, 3, 4, 5]) ordered_day_n = np.roll(ordered_day_n, self.startDayOfTheWeek) ep_weeks = [] for week_id in dict_week: ep_week = self.idf.add_object( ep_object='Schedule:Week:Daily'.upper(), save=False, **dict(Name=week_id, **{'{}_ScheduleDay_Name'.format( weekday): dict_week[week_id][ 'day_{}'.format(i)] for i, weekday in zip(ordered_day_n, list_day_of_week) }, Holiday_ScheduleDay_Name= dict_week[week_id]['day_6'], SummerDesignDay_ScheduleDay_Name= dict_week[week_id]['day_1'], WinterDesignDay_ScheduleDay_Name= dict_week[week_id]['day_1'], CustomDay1_ScheduleDay_Name= dict_week[week_id]['day_2'], CustomDay2_ScheduleDay_Name= dict_week[week_id]['day_5']) ) ep_weeks.append(ep_week) import itertools blocks = {} from_date = datetime(self.year, 1, 1) bincount = [sum(1 for _ in group) for key, group in itertools.groupby(nws + 1) if key] week_order = {i: v for i, v in enumerate(np.array( [key for key, group in itertools.groupby(nws + 1) if key]) - 1)} for i, (week_n, count) in enumerate( zip(week_order, bincount)): week_id = list(dict_week)[week_order[i]] to_date = from_date + timedelta(days=int(count * 7), hours=-1) blocks[i] = {} blocks[i]['week_id'] = week_id blocks[i]['from_day'] = from_date.day blocks[i]['end_day'] = to_date.day blocks[i]['from_month'] = from_date.month blocks[i]['end_month'] = to_date.month from_date = to_date + timedelta(hours=1) # If this is the last block, force end of year if i == len(bincount) - 1: blocks[i]['end_day'] = 31 blocks[i]['end_month'] = 12 new_dict = dict(Name=self.schName + '_', Schedule_Type_Limits_Name=self.schTypeLimitsName) for i in blocks: new_dict.update({"ScheduleWeek_Name_{}".format(i + 1): blocks[i]['week_id'], "Start_Month_{}".format(i + 1): blocks[i]['from_month'], "Start_Day_{}".format(i + 1): blocks[i]['from_day'], "End_Month_{}".format(i + 1): blocks[i]['end_month'], "End_Day_{}".format(i + 1): blocks[i]['end_day']}) ep_year = self.idf.add_object(ep_object='Schedule:Year'.upper(), save=False, **new_dict) return ep_year, ep_weeks, ep_days def date_field_interpretation(self, field): """Date Field Interpretation Args: field (str): The EnergyPlus Field Contents Returns: (datetime): The datetime object Info: See EnergyPlus documentation for more details: 1.6.8.1.2 Field: Start Date (Table 1.4: Date Field Interpretation) """ # < number > Weekday in Month formats = ['%m/%d', '%d %B', '%B %d', '%d %b', '%b %d'] date = None for format_str in formats: # Tru to parse using each defined 
formats try: date = datetime.strptime(field, format_str) except: pass else: date = datetime(self.year, date.month, date.day) if date is None: # if the defined formats did not work, try the fancy parse try: date = self.parse_fancy_string(field) except: msg = "the schedule '{sch}' contains a " \ "Field that is not understood: '{field}'".format( sch=self.schName, field=field) raise ValueError(msg) else: return date else: return date def parse_fancy_string(self, field): """Will try to parse cases such as `3rd Monday in February` or `Last Weekday In Month` Args: field (str): The EnergyPlus Field Contents Returns: (datetime): The datetime object """ import re # split the string at the term ' in ' time, month = field.lower().split(' in ') month = datetime.strptime(month, '%B').month # split the first part into nth and dayofweek nth, dayofweek = time.split(' ') if 'last' in nth: nth = -1 # Use the last one else: nth = re.findall(r'\d+', nth) # use the nth one nth = int(nth[0]) - 1 # python is zero-based weekday = {'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3, 'friday': 4, 'saturday': 5, 'sunday': 6} # parse the dayofweek eg. monday dayofweek = weekday.get(dayofweek, 6) # create list of possible days using Calendar import calendar c = calendar.Calendar(firstweekday=self.startDayOfTheWeek) monthcal = c.monthdatescalendar(self.year, month) # iterate though the month and get the nth weekday date = [day for week in monthcal for day in week if \ day.weekday() == dayofweek and \ day.month == month][nth] return datetime(date.year, date.month, date.day) def field_set(self, field, slicer_=None): """helper function to return the proper slicer depending on the field_set value. Available values are: Weekdays, Weekends, Holidays, Alldays, SummerDesignDay, WinterDesignDay, Sunday, Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, CustomDay1, CustomDay2, AllOtherDays Args: field (str): The EnergyPlus field set value. slicer_ (pd.Series): The persistent slicer for this schedule Returns: (indexer-like): Returns the appropriate indexer for the series. """ if field.lower() == 'weekdays': # return only days of weeks return lambda x: x.index.dayofweek < 5 elif field.lower() == 'weekends': # return only weekends return lambda x: x.index.dayofweek >= 5 elif field.lower() == 'alldays': log('For schedule "{}", the field-set "AllDays" may be overridden ' 'by the "AllOtherDays" field-set'.format( self.schName), lg.WARNING) # return all days := equivalenet to .loc[:] return pd.IndexSlice[:] elif field.lower() == 'allotherdays': # return unused days (including special days). 
Uses the global # variable `slicer_` import operator if slicer_ is not None: return _conjunction(*[self.special_day(field, slicer_), ~slicer_], logical=operator.or_) else: raise NotImplementedError elif field.lower() == 'sunday': # return only sundays return lambda x: x.index.dayofweek == 6 elif field.lower() == 'monday': # return only mondays return lambda x: x.index.dayofweek == 0 elif field.lower() == 'tuesday': # return only Tuesdays return lambda x: x.index.dayofweek == 1 elif field.lower() == 'wednesday': # return only Wednesdays return lambda x: x.index.dayofweek == 2 elif field.lower() == 'thursday': # return only Thursdays return lambda x: x.index.dayofweek == 3 elif field.lower() == 'friday': # return only Fridays return lambda x: x.index.dayofweek == 4 elif field.lower() == 'saturday': # return only Saturdays return lambda x: x.index.dayofweek == 5 elif field.lower() == 'summerdesignday': # return design_day(self, field) return None elif field.lower() == 'winterdesignday': # return design_day(self, field) return None elif field.lower() == 'holiday' or field.lower() == 'holidays': field = 'holiday' return self.special_day(field, slicer_) elif not self.strict: # If not strict, ignore missing field-sets such as CustomDay1 return pd.IndexSlice[:] else: raise NotImplementedError( 'Archetypal does not yet support The ' 'Field_set "{}"'.format(field)) def __len__(self): """returns the length of all values of the schedule""" return len(self.all_values) def __eq__(self, other): """Overrides the default implementation""" if isinstance(other, Schedule): return self.all_values == other.all_values else: raise NotImplementedError def __ne__(self, other): return ~(self.__eq__(other)) def __add__(self, other): if isinstance(other, Schedule): return self.all_values + other.all_values elif isinstance(other, list): return self.all_values + other else: raise NotImplementedError def __sub__(self, other): if isinstance(other, Schedule): return self.all_values - other.all_values elif isinstance(other, list): return self.all_values - other else: raise NotImplementedError def __mul__(self, other): if isinstance(other, Schedule): return self.all_values * other.all_values elif isinstance(other, list): return self.all_values * other else: raise NotImplementedError def get_sdow(self, start_day_of_week): """Returns the start day of the week""" if start_day_of_week is None: return self.idf.day_of_week_for_start_day else: return start_day_of_week def special_day(self, field, slicer_): """try to get the RunPeriodControl:SpecialDays for the corresponding Day Type""" sp_slicer_ = slicer_.copy() sp_slicer_.loc[:] = False special_day_types = ['holiday', 'customday1', 'customday2'] dds = self.idf.idfobjects['RunPeriodControl:SpecialDays'.upper()] dd = [dd for dd in dds if dd.Special_Day_Type.lower() == field or dd.Special_Day_Type.lower() in special_day_types] if len(dd) > 0: slice = [] for dd in dd: # can have more than one special day types data = dd.Start_Date ep_start_date = self.date_field_interpretation(data) ep_orig = datetime(self.year, 1, 1) days_to_speciald = (ep_start_date - ep_orig).days duration = int(dd.Duration) from_date = self.startDate + timedelta(days=days_to_speciald) to_date = from_date + timedelta(days=duration) + timedelta( hours=-1) sp_slicer_.loc[from_date:to_date] = True return sp_slicer_ elif not self.strict: return sp_slicer_ else: msg = 'Could not find a "SizingPeriod:DesignDay" object ' \ 'needed for schedule "{}" with Day Type "{}"'.format( self.schName, field.capitalize() ) raise 
ValueError(msg) def design_day(schedule, field): # try to get the SizingPeriod:DesignDay for the corresponding Day Type dds = schedule.idf.idfobjects['SizingPeriod:DesignDay'.upper()] dd = [dd for dd in dds if dd.Day_Type.lower() == field] if len(dd) > 0: # should have found only one design day matching the Day Type data = [dd[0].Month, dd[0].Day_of_Month] date = '/'.join([str(item).zfill(2) for item in data]) date = schedule.date_field_interpretation(date) return lambda x: x.index == date else: msg = 'Could not find a "SizingPeriod:DesignDay" object ' \ 'needed for schedule "{}" with Day Type "{}"'.format( schedule.schName, field.capitalize() ) raise ValueError(msg) def _conjunction(*conditions, logical=np.logical_and): """Applies a logical function on n conditions""" return functools.reduce(logical, conditions) def _separator(sep): """helper function to return the correct delimiter""" if sep == 'Comma': return ',' elif sep == 'Tab': return '\t' elif sep == 'Fixed': return None elif sep == 'Semicolon': return ';' else: return ',' def _how(how): """Helper function to return the correct resampler""" if how.lower() == 'average': return 'mean' elif how.lower() == 'linear': return 'interpolate' elif how.lower() == 'no': return 'max' else: return 'max'
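Because the content above defines the whole Schedule class, a short usage sketch may help; the import path and call signatures are read off the code in this record rather than from documented archetypal API, so treat them as assumptions.

# Usage sketch for the Schedule class shown above; assumes archetypal and its
# EnergyPlus/eppy dependencies are installed. Names are taken from the record, not docs.
from archetypal.schedule import Schedule

sched = Schedule.constant_schedule()   # scratch IDF with an 'AlwaysOn' Schedule:Constant
print(len(sched))                      # number of hourly values for the base year
print(sched.min, sched.mean, sched.max)

series = sched.series                  # pandas Series with an hourly DateTimeIndex
print(series.head(24))                 # first day of the year

# Decompose back into Schedule:Year / Schedule:Week:Daily / Schedule:Day:Hourly objects
year, weeks, days = sched.to_year_week_day()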
39.808811
100
0.535799
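The to_year_week_day method in the code above compresses 8,760 hourly values by reshaping them into daily rows and deduplicating those rows with numpy; the self-contained sketch below shows only that core idea on synthetic data, not the full EnergyPlus object creation.

# Core idea behind Schedule.to_year_week_day: find the distinct daily profiles.
# Synthetic data only; in the class above the values come from Schedule.all_values.
import numpy as np

day_profile = np.array([0.0] * 8 + [1.0] * 10 + [0.0] * 6)   # 24 hourly values
hourly = np.tile(day_profile, 365)                           # 8760 values for one year
days = hourly.reshape(-1, 24)                                # shape (365, 24)

unique_days, day_index = np.unique(days, axis=0, return_inverse=True)
print(unique_days.shape)   # one row per distinct Schedule:Day:Hourly profile
print(day_index[:7])       # which profile each of the first seven days uses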
import functools import io import logging as lg from datetime import datetime, timedelta import archetypal import numpy as np import pandas as pd from archetypal import log class Schedule(object): def __init__(self, sch_name, idf=None, start_day_of_the_week=0, strict=False, base_year=2018, schType=None, **kwargs): super(Schedule, self).__init__(**kwargs) self.strict = strict self.idf = idf self.schName = sch_name self.startDayOfTheWeek = self.get_sdow(start_day_of_the_week) self.year = base_year self.startDate = self.start_date() self.count = 0 self.startHOY = 1 self.endHOY = 24 self.unit = "unknown" self.index_ = None self.values = None self.schType = schType _type = kwargs.get('Type', None) if _type is None: self.schTypeLimitsName = self.get_schedule_type_limits_name( sch_type=self.schType) else: self.schTypeLimitsName = _type @classmethod def constant_schedule(cls, hourly_value=1, Name='AlwaysOn', **kwargs): idftxt = "VERSION, 8.9;" fhandle = io.StringIO(idftxt) idf_scratch = archetypal.IDF(fhandle) idf_scratch.add_object(ep_object='Schedule:Constant'.upper(), **dict(Name=Name, Schedule_Type_Limits_Name='', Hourly_Value=hourly_value), save=False) sched = Schedule(sch_name=Name, idf=idf_scratch, **kwargs) return sched @property def all_values(self): if self.values is None: self.values = self.get_schedule_values(sch_name=self.schName, sch_type=self.schType) return self.values else: return self.values @property def max(self): return max(self.all_values) @property def min(self): return min(self.all_values) @property def mean(self): return np.mean(self.all_values) @property def series(self): index = pd.date_range(start=self.startDate, periods=len( self.all_values), freq='1H') return pd.Series(self.all_values, index=index) def get_schedule_type_limits_name(self, sch_name=None, sch_type=None): if sch_name is None: sch_name = self.schName if sch_type is None: schedule_values = self.idf.get_schedule_data_by_name(sch_name, sch_type=sch_type) try: schedule_limit_name = schedule_values.Schedule_Type_Limits_Name except: return 'unknown' else: return schedule_limit_name def get_schedule_type_limits_data(self, sch_name=None): if sch_name is None: sch_name = self.schName schedule_values = self.idf.get_schedule_data_by_name(sch_name) try: schedule_limit_name = schedule_values.Schedule_Type_Limits_Name except: return '', '', '', '' else: lower_limit, upper_limit, numeric_type, unit_type = \ self.idf.get_schedule_type_limits_data_by_name( schedule_limit_name) self.unit = unit_type if self.unit == "unknown": self.unit = numeric_type return lower_limit, upper_limit, numeric_type, unit_type def get_schedule_type(self, sch_name=None): if sch_name is None: sch_name = self.schName schedule_values = self.idf.get_schedule_data_by_name(sch_name) sch_type = schedule_values.fieldvalues[0] return sch_type def start_date(self): import calendar c = calendar.Calendar(firstweekday=self.startDayOfTheWeek) start_date = c.monthdatescalendar(self.year, 1)[0][0] return datetime(start_date.year, start_date.month, start_date.day) def plot(self, slice=None, **kwargs): hourlyvalues = self.all_values index = pd.date_range(self.startDate, periods=len( hourlyvalues), freq='1H') series = pd.Series(hourlyvalues, index=index, dtype=float) if slice is None: slice = pd.IndexSlice[:] elif len(slice) > 1: slice = pd.IndexSlice[slice[0]:slice[1]] ax = series.loc[slice].plot(**kwargs, label=self.schName) return ax def get_interval_day_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = 
self.idf.getobject('Schedule:Day:Interval'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) number_of_day_sch = int((len(values.fieldvalues) - 3) / 2) hourly_values = np.arange(24) start_hour = 0 for i in range(number_of_day_sch): value = float(values['Value_Until_Time_{}'.format(i + 1)]) until_time = [int(s.strip()) for s in values['Time_{}'.format(i + 1)].split(":") if s.strip().isdigit()] end_hour = int(until_time[0] + until_time[1] / 60) for hour in range(start_hour, end_hour): hourly_values[hour] = value start_hour = end_hour if numeric_type.strip().lower() == "discrete": hourly_values = hourly_values.astype(int) return hourly_values def get_hourly_day_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('Schedule:Day:Hourly'.upper(), sch_name) fieldvalues_ = np.array(values.fieldvalues[3:]) return fieldvalues_ def get_compact_weekly_ep_schedule_values(self, sch_name=None, start_date=None, index=None): if start_date is None: start_date = self.startDate if index is None: idx = pd.date_range(start=start_date, periods=168, freq='1H') slicer_ = pd.Series([False] * (len(idx)), index=idx) else: slicer_ = pd.Series([False] * (len(index)), index=index) if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:week:compact'.upper(), sch_name) weekly_schedules = pd.Series([0] * len(slicer_), index=slicer_.index) if self.count == 0: self.schType = values.key self.endHOY = 168 num_of_daily_schedules = int(len(values.fieldvalues[2:]) / 2) for i in range(num_of_daily_schedules): day_type = values['DayType_List_{}'.format(i + 1)].lower() how = self.field_set(day_type, slicer_) if not weekly_schedules.loc[how].empty: days = [] for name, day in weekly_schedules.loc[how].groupby(pd.Grouper( freq='D')): if not day.empty: ref = values.get_referenced_object( "ScheduleDay_Name_{}".format(i + 1)) day.loc[:] = self.get_schedule_values( sch_name=ref.Name, sch_type=ref.key) days.append(day) new = pd.concat(days) slicer_.update( pd.Series([True] * len(new.index), index=new.index)) slicer_ = slicer_.apply(lambda x: x == True) weekly_schedules.update(new) else: return weekly_schedules.values return weekly_schedules.values def get_daily_weekly_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:week:daily'.upper(), sch_name) hourly_values = [] for day in ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']: ref = values.get_referenced_object( '{}_ScheduleDay_Name'.format(day)) h = self.get_schedule_values(sch_name=ref.Name, sch_type=ref.key) hourly_values.append(h) hourly_values = np.array(hourly_values) hourly_values = np.roll(hourly_values, -self.startDayOfTheWeek, axis=0) return hourly_values.ravel() def get_list_day_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:day:list'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) import pandas as pd freq = int(values['Minutes_per_Item']) num_values = values.fieldvalues[5:] method = values['Interpolate_to_Timestep'] all_values = np.arange(int(24 * 60 / freq)) for i in all_values: try: all_values[i] = num_values[i] except: all_values[i] = 0 index = pd.date_range(start=self.startDate, periods=(24 * 60) / freq, freq='{}T'.format(freq)) series = pd.Series(all_values, index=index) 
series = series.resample('1H').apply(_how(method)) return series.values def get_constant_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:constant'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) hourly_values = np.arange(8760) value = float(values['Hourly_Value']) for hour in hourly_values: hourly_values[hour] = value if numeric_type.strip().lower() == 'discrete': hourly_values = hourly_values.astype(int) return hourly_values def get_file_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:file'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) filename = values['File_Name'] column = values['Column_Number'] rows = values['Rows_to_Skip_at_Top'] hours = values['Number_of_Hours_of_Data'] sep = values['Column_Separator'] interp = values['Interpolate_to_Timestep'] import pandas as pd import os idfdir = os.path.dirname(self.idf.idfname) file = os.path.join(idfdir, filename) delimeter = _separator(sep) skip_rows = int(rows) - 1 col = [int(column) - 1] values = pd.read_csv(file, delimiter=delimeter, skiprows=skip_rows, usecols=col) return values.iloc[:, 0].values def get_compact_ep_schedule_values(self, sch_name=None): if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:compact'.upper(), sch_name) lower_limit, upper_limit, numeric_type, unit_type = \ self.get_schedule_type_limits_data(sch_name) field_sets = ['through', 'for', 'interpolate', 'until', 'value'] fields = values.fieldvalues[3:] index = pd.date_range(start=self.startDate, periods=8760, freq='H') zeros = np.zeros(len(index)) slicer_ = pd.Series([False] * len(index), index=index) series = pd.Series(zeros, index=index) from_day = self.startDate ep_from_day = datetime(self.year, 1, 1) from_time = '00:00' how_interpolate = None for field in fields: if any([spe in field.lower() for spe in field_sets]): f_set, hour, minute, value = self.field_interpreter(field) if f_set.lower() == 'through': through_conditions = self.invalidate_condition(series) from_time = '00:00' ep_to_day = self.date_field_interpretation(value) + \ timedelta(days=1) days = (ep_to_day - ep_from_day).days to_day = from_day + timedelta(days=days) + timedelta( hours=-1) through_conditions.loc[from_day:to_day] = True from_day = to_day + timedelta(hours=1) ep_from_day = ep_to_day elif f_set.lower() == 'for': from_time = '00:00' for_condition = self.invalidate_condition(series) values = value.split() if len(values) > 1: for value in values: if value.lower() == 'allotherdays': how = self.field_set(value, slicer_) through_conditions = how for_condition = how else: how = self.field_set(value, slicer_) for_condition.loc[how] = True elif value.lower() == 'allotherdays': how = self.field_set(value, slicer_) through_conditions = how for_condition = how else: how = self.field_set(value) for_condition.loc[how] = True all_conditions = through_conditions & for_condition elif 'interpolate' in f_set.lower(): new_idx = pd.date_range(start=self.startDate, periods=525600, closed='left', freq='T') series = series.resample('T').pad() series = series.reindex(new_idx) series.fillna(method='pad', inplace=True) through_conditions = through_conditions.resample('T').pad() through_conditions = through_conditions.reindex(new_idx) through_conditions.fillna(method='pad', inplace=True) 
for_condition = for_condition.resample('T').pad() for_condition = for_condition.reindex(new_idx) for_condition.fillna(method='pad', inplace=True) how_interpolate = value.lower() elif f_set.lower() == 'until': until_condition = self.invalidate_condition(series) if series.index.freq.name == 'T': until_time = timedelta(hours=int(hour), minutes=int(minute)) - timedelta( minutes=1) else: until_time = str(int(hour) - 1) + ':' + minute until_condition.loc[until_condition.between_time(from_time, str( until_time)).index] = True all_conditions = for_condition & through_conditions & \ until_condition from_time = str(int(hour)) + ':' + minute elif f_set.lower() == 'value': slicer_.loc[all_conditions] = True series[all_conditions] = value else: pass else: value = float(field) series[all_conditions] = value slicer_.loc[all_conditions] = True if how_interpolate: return series.resample('H').mean().values else: return series.values def field_interpreter(self, field): if 'through' in field.lower(): if ':' in field.lower(): f_set, statement = field.split(':') hour = None minute = None value = statement.strip() else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'for' in field.lower(): if ':' in field.lower(): f_set, statement = field.split(':') value = statement.strip() hour = None minute = None else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'interpolate' in field.lower(): msg = 'The schedule "{sch}" contains sub-hourly values (' \ 'Field-Set="{field}"). The average over the hour is ' \ 'taken'.format(sch=self.schName, field=field) log(msg, lg.WARNING) f_set, value = field.split(':') hour = None minute = None elif 'until' in field.lower(): if ':' in field.lower(): try: f_set, hour, minute = field.split(':') hour = hour.strip() minute = minute.strip() value = None except: f_set = 'until' hour, minute = field.split(':') hour = hour[-2:].strip() minute = minute.strip() value = None else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) elif 'value' in field.lower(): if ':' in field.lower(): f_set, statement = field.split(':') value = statement.strip() hour = None minute = None else: msg = 'The schedule "{sch}" contains a Field ' \ 'that is not understood: "{field}"'.format( sch=self.schName, field=field) raise NotImplementedError(msg) else: f_set = field hour = None minute = None value = field[len(field) + 1:].strip() return f_set, hour, minute, value @staticmethod def invalidate_condition(series): index = series.index periods = len(series) return pd.Series([False] * periods, index=index) def get_yearly_ep_schedule_values(self, sch_name=None): start_date = self.startDate idx = pd.date_range(start=start_date, periods=8760, freq='1H') hourly_values = pd.Series([0] * 8760, index=idx) self.endHOY = 8760 if sch_name is None: sch_name = self.schName values = self.idf.getobject('schedule:year'.upper(), sch_name) num_of_weekly_schedules = int(len(values.fieldvalues[3:]) / 5) for i in range(num_of_weekly_schedules): ref = values.get_referenced_object( 'ScheduleWeek_Name_{}'.format(i + 1)) start_month = values['Start_Month_{}'.format(i + 1)] end_month = values['End_Month_{}'.format(i + 1)] start_day = values['Start_Day_{}'.format(i + 1)] end_day = values['End_Day_{}'.format(i + 1)] start 
= datetime.strptime( '{}/{}/{}'.format(self.year, start_month, start_day), '%Y/%m/%d') end = datetime.strptime( '{}/{}/{}'.format(self.year, end_month, end_day), '%Y/%m/%d') days = (end - start).days + 1 end_date = start_date + timedelta(days=days) + timedelta(hours=23) how = pd.IndexSlice[start_date:end_date] weeks = [] for name, week in hourly_values.loc[how].groupby( pd.Grouper(freq='168H')): if not week.empty: try: week.loc[:] = self.get_schedule_values( sch_name=ref.Name, start_date=week.index[0], index=week.index, sch_type=ref.key) except ValueError: week.loc[:] = self.get_schedule_values( ref.Name, week.index[0])[0:len(week)] finally: weeks.append(week) new = pd.concat(weeks) hourly_values.update(new) start_date += timedelta(days=days) return hourly_values.values def get_schedule_values(self, sch_name=None, start_date=None, index=None, sch_type=None): if sch_name is None: sch_name = self.schName if sch_type is None: schedule_values = self.idf.get_schedule_data_by_name(sch_name) self.schType = schedule_values.key.upper() sch_type = self.schType if self.count == 0: self.schTypeLimitsName = self.get_schedule_type_limits_name() self.count += 1 if sch_type.upper() == "schedule:year".upper(): hourly_values = self.get_yearly_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:interval".upper(): hourly_values = self.get_interval_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:hourly".upper(): hourly_values = self.get_hourly_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:day:list".upper(): hourly_values = self.get_list_day_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:week:compact".upper(): hourly_values = self.get_compact_weekly_ep_schedule_values( sch_name, start_date, index) elif sch_type.upper() == "schedule:week:daily".upper(): hourly_values = self.get_daily_weekly_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:constant".upper(): hourly_values = self.get_constant_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:compact".upper(): hourly_values = self.get_compact_ep_schedule_values( sch_name) elif sch_type.upper() == "schedule:file".upper(): hourly_values = self.get_file_ep_schedule_values( sch_name) else: log('Archetypal does not support "{}" currently'.format( self.schType), lg.WARNING) hourly_values = [] return hourly_values def is_schedule(self, sch_name): if sch_name.upper() in self.idf.schedules_dict: return True else: return False def to_year_week_day(self): full_year = np.array(self.all_values) values = full_year.reshape(-1, 24) unique_days, nds = np.unique(values, axis=0, return_inverse=True) ep_days = [] dict_day = {} count_day = 0 for unique_day in unique_days: name = 'd_' + self.schName + '_' + '%03d' % count_day name, count_day = archetypal.check_unique_name('d', count_day, name, archetypal.settings.unique_schedules, suffix=True) dict_day[name] = unique_day archetypal.settings.unique_schedules.append(name) ep_day = self.idf.add_object( ep_object='Schedule:Day:Hourly'.upper(), save=False, **dict(Name=name, Schedule_Type_Limits_Name=self.schType, **{'Hour_{}'.format(i + 1): unique_day[i] for i in range(24)}) ) ep_days.append(ep_day) unique_weeks, nwsi, nws, count = np.unique( full_year[:364 * 24, ...].reshape(-1, 168), return_index=True, axis=0, return_inverse=True, return_counts=True) dict_week = {} count_week = 0 for unique_week in unique_weeks: week_id = 'w_' + self.schName + '_' + '%03d' % count_week week_id, count_week = archetypal.check_unique_name('w', 
count_week, week_id, archetypal.settings.unique_schedules, suffix=True) archetypal.settings.unique_schedules.append(week_id) dict_week[week_id] = {} for i in list(range(0, 7)): day_of_week = unique_week[..., i * 24:(i + 1) * 24] for key in dict_day: if (day_of_week == dict_day[key]).all(): dict_week[week_id]['day_{}'.format(i)] = key list_day_of_week = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'] ordered_day_n = np.array([6, 0, 1, 2, 3, 4, 5]) ordered_day_n = np.roll(ordered_day_n, self.startDayOfTheWeek) ep_weeks = [] for week_id in dict_week: ep_week = self.idf.add_object( ep_object='Schedule:Week:Daily'.upper(), save=False, **dict(Name=week_id, **{'{}_ScheduleDay_Name'.format( weekday): dict_week[week_id][ 'day_{}'.format(i)] for i, weekday in zip(ordered_day_n, list_day_of_week) }, Holiday_ScheduleDay_Name= dict_week[week_id]['day_6'], SummerDesignDay_ScheduleDay_Name= dict_week[week_id]['day_1'], WinterDesignDay_ScheduleDay_Name= dict_week[week_id]['day_1'], CustomDay1_ScheduleDay_Name= dict_week[week_id]['day_2'], CustomDay2_ScheduleDay_Name= dict_week[week_id]['day_5']) ) ep_weeks.append(ep_week) import itertools blocks = {} from_date = datetime(self.year, 1, 1) bincount = [sum(1 for _ in group) for key, group in itertools.groupby(nws + 1) if key] week_order = {i: v for i, v in enumerate(np.array( [key for key, group in itertools.groupby(nws + 1) if key]) - 1)} for i, (week_n, count) in enumerate( zip(week_order, bincount)): week_id = list(dict_week)[week_order[i]] to_date = from_date + timedelta(days=int(count * 7), hours=-1) blocks[i] = {} blocks[i]['week_id'] = week_id blocks[i]['from_day'] = from_date.day blocks[i]['end_day'] = to_date.day blocks[i]['from_month'] = from_date.month blocks[i]['end_month'] = to_date.month from_date = to_date + timedelta(hours=1) if i == len(bincount) - 1: blocks[i]['end_day'] = 31 blocks[i]['end_month'] = 12 new_dict = dict(Name=self.schName + '_', Schedule_Type_Limits_Name=self.schTypeLimitsName) for i in blocks: new_dict.update({"ScheduleWeek_Name_{}".format(i + 1): blocks[i]['week_id'], "Start_Month_{}".format(i + 1): blocks[i]['from_month'], "Start_Day_{}".format(i + 1): blocks[i]['from_day'], "End_Month_{}".format(i + 1): blocks[i]['end_month'], "End_Day_{}".format(i + 1): blocks[i]['end_day']}) ep_year = self.idf.add_object(ep_object='Schedule:Year'.upper(), save=False, **new_dict) return ep_year, ep_weeks, ep_days def date_field_interpretation(self, field): formats = ['%m/%d', '%d %B', '%B %d', '%d %b', '%b %d'] date = None for format_str in formats: try: date = datetime.strptime(field, format_str) except: pass else: date = datetime(self.year, date.month, date.day) if date is None: try: date = self.parse_fancy_string(field) except: msg = "the schedule '{sch}' contains a " \ "Field that is not understood: '{field}'".format( sch=self.schName, field=field) raise ValueError(msg) else: return date else: return date def parse_fancy_string(self, field): import re time, month = field.lower().split(' in ') month = datetime.strptime(month, '%B').month nth, dayofweek = time.split(' ') if 'last' in nth: nth = -1 else: nth = re.findall(r'\d+', nth) nth = int(nth[0]) - 1 weekday = {'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3, 'friday': 4, 'saturday': 5, 'sunday': 6} dayofweek = weekday.get(dayofweek, 6) import calendar c = calendar.Calendar(firstweekday=self.startDayOfTheWeek) monthcal = c.monthdatescalendar(self.year, month) date = [day for week in monthcal for day in week if \ day.weekday() == dayofweek and 
\ day.month == month][nth] return datetime(date.year, date.month, date.day) def field_set(self, field, slicer_=None): if field.lower() == 'weekdays': return lambda x: x.index.dayofweek < 5 elif field.lower() == 'weekends': return lambda x: x.index.dayofweek >= 5 elif field.lower() == 'alldays': log('For schedule "{}", the field-set "AllDays" may be overridden ' 'by the "AllOtherDays" field-set'.format( self.schName), lg.WARNING) return pd.IndexSlice[:] elif field.lower() == 'allotherdays': import operator if slicer_ is not None: return _conjunction(*[self.special_day(field, slicer_), ~slicer_], logical=operator.or_) else: raise NotImplementedError elif field.lower() == 'sunday': return lambda x: x.index.dayofweek == 6 elif field.lower() == 'monday': return lambda x: x.index.dayofweek == 0 elif field.lower() == 'tuesday': return lambda x: x.index.dayofweek == 1 elif field.lower() == 'wednesday': return lambda x: x.index.dayofweek == 2 elif field.lower() == 'thursday': return lambda x: x.index.dayofweek == 3 elif field.lower() == 'friday': return lambda x: x.index.dayofweek == 4 elif field.lower() == 'saturday': return lambda x: x.index.dayofweek == 5 elif field.lower() == 'summerdesignday': return None elif field.lower() == 'winterdesignday': return None elif field.lower() == 'holiday' or field.lower() == 'holidays': field = 'holiday' return self.special_day(field, slicer_) elif not self.strict: return pd.IndexSlice[:] else: raise NotImplementedError( 'Archetypal does not yet support The ' 'Field_set "{}"'.format(field)) def __len__(self): return len(self.all_values) def __eq__(self, other): if isinstance(other, Schedule): return self.all_values == other.all_values else: raise NotImplementedError def __ne__(self, other): return ~(self.__eq__(other)) def __add__(self, other): if isinstance(other, Schedule): return self.all_values + other.all_values elif isinstance(other, list): return self.all_values + other else: raise NotImplementedError def __sub__(self, other): if isinstance(other, Schedule): return self.all_values - other.all_values elif isinstance(other, list): return self.all_values - other else: raise NotImplementedError def __mul__(self, other): if isinstance(other, Schedule): return self.all_values * other.all_values elif isinstance(other, list): return self.all_values * other else: raise NotImplementedError def get_sdow(self, start_day_of_week): if start_day_of_week is None: return self.idf.day_of_week_for_start_day else: return start_day_of_week def special_day(self, field, slicer_): sp_slicer_ = slicer_.copy() sp_slicer_.loc[:] = False special_day_types = ['holiday', 'customday1', 'customday2'] dds = self.idf.idfobjects['RunPeriodControl:SpecialDays'.upper()] dd = [dd for dd in dds if dd.Special_Day_Type.lower() == field or dd.Special_Day_Type.lower() in special_day_types] if len(dd) > 0: slice = [] for dd in dd: data = dd.Start_Date ep_start_date = self.date_field_interpretation(data) ep_orig = datetime(self.year, 1, 1) days_to_speciald = (ep_start_date - ep_orig).days duration = int(dd.Duration) from_date = self.startDate + timedelta(days=days_to_speciald) to_date = from_date + timedelta(days=duration) + timedelta( hours=-1) sp_slicer_.loc[from_date:to_date] = True return sp_slicer_ elif not self.strict: return sp_slicer_ else: msg = 'Could not find a "SizingPeriod:DesignDay" object ' \ 'needed for schedule "{}" with Day Type "{}"'.format( self.schName, field.capitalize() ) raise ValueError(msg) def design_day(schedule, field): dds = 
schedule.idf.idfobjects['SizingPeriod:DesignDay'.upper()] dd = [dd for dd in dds if dd.Day_Type.lower() == field] if len(dd) > 0: data = [dd[0].Month, dd[0].Day_of_Month] date = '/'.join([str(item).zfill(2) for item in data]) date = schedule.date_field_interpretation(date) return lambda x: x.index == date else: msg = 'Could not find a "SizingPeriod:DesignDay" object ' \ 'needed for schedule "{}" with Day Type "{}"'.format( schedule.schName, field.capitalize() ) raise ValueError(msg) def _conjunction(*conditions, logical=np.logical_and): return functools.reduce(logical, conditions) def _separator(sep): if sep == 'Comma': return ',' elif sep == 'Tab': return '\t' elif sep == 'Fixed': return None elif sep == 'Semicolon': return ';' else: return ',' def _how(how): if how.lower() == 'average': return 'mean' elif how.lower() == 'linear': return 'interpolate' elif how.lower() == 'no': return 'max' else: return 'max'
true
true
f70140c6982875c8447bb9e230f4d9b63c3ecaf0
44
py
Python
pupa/__init__.py
opencivicdata/pupa
8087e221fc527a80262192d22c2f50966c20604d
[ "BSD-3-Clause" ]
62
2015-01-08T05:46:46.000Z
2022-01-31T03:27:14.000Z
pupa/__init__.py
opencivicdata/pupa
8087e221fc527a80262192d22c2f50966c20604d
[ "BSD-3-Clause" ]
199
2015-01-10T03:19:37.000Z
2021-05-21T20:34:58.000Z
pupa/__init__.py
opencivicdata/pupa
8087e221fc527a80262192d22c2f50966c20604d
[ "BSD-3-Clause" ]
35
2015-03-09T19:41:42.000Z
2021-06-22T20:01:35.000Z
__version__ = '0.10.2' # pragma: no cover
22
43
0.636364
__version__ = '0.10.2'
true
true
f701410e9b48a170183eb9219ab8738113958f1d
84
py
Python
tests/requesters/__init__.py
Tberdy/python-amazon-mws-tools
2925118ce113851a2d8db98ad7f99163154f4151
[ "Unlicense" ]
9
2017-03-28T12:58:36.000Z
2020-03-02T14:42:32.000Z
tests/requesters/__init__.py
Tberdy/python-amazon-mws-tools
2925118ce113851a2d8db98ad7f99163154f4151
[ "Unlicense" ]
5
2017-01-05T19:36:18.000Z
2021-12-13T19:43:42.000Z
tests/requesters/__init__.py
Tberdy/python-amazon-mws-tools
2925118ce113851a2d8db98ad7f99163154f4151
[ "Unlicense" ]
5
2017-02-15T17:29:02.000Z
2019-03-06T07:30:55.000Z
from reports import suite as reports_suite
from orders import suite as orders_suite
28
42
0.857143
from reports import suite as reports_suite
from orders import suite as orders_suite
true
true
f70141ddaaf55ae4bb906ac7ae26d0faa94aa511
12,461
py
Python
magenta/models/score2perf/music_encoders_test.py
sleep-yearning/magenta
a03a14ef5a691ee9e3d336aa621281028dc5af32
[ "Apache-2.0" ]
1
2020-02-24T06:12:09.000Z
2020-02-24T06:12:09.000Z
magenta/models/score2perf/music_encoders_test.py
sleep-yearning/magenta
a03a14ef5a691ee9e3d336aa621281028dc5af32
[ "Apache-2.0" ]
null
null
null
magenta/models/score2perf/music_encoders_test.py
sleep-yearning/magenta
a03a14ef5a691ee9e3d336aa621281028dc5af32
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 The Magenta Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for Score2Perf music encoders.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import tempfile import magenta from magenta.models.score2perf import music_encoders from magenta.music import testing_lib from magenta.music.protobuf import music_pb2 import tensorflow.compat.v1 as tf class MidiPerformanceEncoderTest(tf.test.TestCase): def testNumReservedIds(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) self.assertEqual(2, encoder.num_reserved_ids) def testEncodeEmptyNoteSequence(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) ids = encoder.encode_note_sequence(music_pb2.NoteSequence()) self.assertEqual([], ids) def testEncodeEmptyNoteSequenceAddEos(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, add_eos=True) ids = encoder.encode_note_sequence(music_pb2.NoteSequence()) self.assertEqual([1], ids) def testEncodeNoteSequence(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, # VELOCITY(25) 41, # NOTE-ON(60) 45, # NOTE-ON(64) 277, # TIME-SHIFT(100) 309, # VELOCITY(32) 48, # NOTE-ON(67) 277, # TIME-SHIFT(100) 136, # NOTE-OFF(67) 277, # TIME-SHIFT(100) 133, # NOTE-OFF(64 277, # TIME-SHIFT(100) 129 # NOTE-OFF(60) ] self.assertEqual(expected_ids, ids) def testEncodeNoteSequenceAddEos(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, add_eos=True) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, # VELOCITY(25) 41, # NOTE-ON(60) 45, # NOTE-ON(64) 277, # TIME-SHIFT(100) 309, # VELOCITY(32) 48, # NOTE-ON(67) 277, # TIME-SHIFT(100) 136, # NOTE-OFF(67) 277, # TIME-SHIFT(100) 133, # NOTE-OFF(64 277, # TIME-SHIFT(100) 129, # NOTE-OFF(60) 1 # EOS ] self.assertEqual(expected_ids, ids) def testEncodeNoteSequenceNGrams(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)]) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, # VELOCITY(25) 310, # NOTE-ON(60), NOTE-ON(64) 277, # TIME-SHIFT(100) 312, # VELOCITY(32), NOTE-ON(67) 277, # TIME-SHIFT(100) 136, # 
NOTE-OFF(67) 277, # TIME-SHIFT(100) 133, # NOTE-OFF(64 277, # TIME-SHIFT(100) 129 # NOTE-OFF(60) ] self.assertEqual(expected_ids, ids) def testEncode(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(277, 129)]) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence(ns, 0, [(60, 97, 0.0, 1.0)]) # Write NoteSequence to MIDI file as encoder takes in filename. with tempfile.NamedTemporaryFile(suffix='.mid') as f: magenta.music.sequence_proto_to_midi_file(ns, f.name) ids = encoder.encode(f.name) expected_ids = [ 302, # VELOCITY(25) 41, # NOTE-ON(60) 310 # TIME-SHIFT(100), NOTE-OFF(60) ] self.assertEqual(expected_ids, ids) def testDecode(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(277, 129)]) ids = [ 302, # VELOCITY(25) 41, # NOTE-ON(60) 310 # TIME-SHIFT(100), NOTE-OFF(60) ] # Decode method returns MIDI filename, read and convert to NoteSequence. filename = encoder.decode(ids) ns = magenta.music.midi_file_to_sequence_proto(filename) # Remove default tempo & time signature. del ns.tempos[:] del ns.time_signatures[:] expected_ns = music_pb2.NoteSequence(ticks_per_quarter=220) testing_lib.add_track_to_sequence(expected_ns, 0, [(60, 97, 0.0, 1.0)]) # Add source info fields. expected_ns.source_info.encoding_type = ( music_pb2.NoteSequence.SourceInfo.MIDI) expected_ns.source_info.parser = ( music_pb2.NoteSequence.SourceInfo.PRETTY_MIDI) self.assertEqual(expected_ns, ns) def testVocabSize(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) self.assertEqual(310, encoder.vocab_size) def testVocabSizeNGrams(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)]) self.assertEqual(314, encoder.vocab_size) class TextChordsEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_chords_to_sequence( ns, [('C', 1), ('Dm', 3), ('Bdim', 4)]) ns.total_time = 5.0 ids = encoder.encode_note_sequence(ns) expected_ids = [ 2, # no-chord 3, # C major 3, # C major 17, # D minor 50 # B diminished ] self.assertEqual(expected_ids, ids) def testEncode(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) ids = encoder.encode('C G Am F') expected_ids = [ 3, # C major 10, # G major 24, # A minor 8 # F major ] self.assertEqual(expected_ids, ids) def testVocabSize(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) self.assertEqual(51, encoder.vocab_size) class TextMelodyEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) encoder_absolute = music_encoders.TextMelodyEncoderAbsolute( steps_per_second=4, min_pitch=21, max_pitch=108) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)]) ids = encoder.encode_note_sequence(ns) ids_absolute = encoder_absolute.encode_note_sequence(ns) expected_ids = [ 43, # ON(60) 45, # ON(62) 2, # HOLD(62) 3, # OFF(62) 2, # REST 47, # ON(64) 2, # HOLD(64) 2 # HOLD(64) ] self.assertEqual(expected_ids, ids) 
self.assertEqual(expected_ids, ids_absolute) def testEncode(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ids = encoder.encode('60 -2 62 -1 64 -2') expected_ids = [ 43, # ON(60) 2, # HOLD(60) 45, # ON(62) 3, # OFF(62) 47, # ON(64) 2 # HOLD(64) ] self.assertEqual(expected_ids, ids) def testVocabSize(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) self.assertEqual(92, encoder.vocab_size) class FlattenedTextMelodyEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute( steps_per_second=4, num_velocity_bins=127) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 15, 0.25, 0.75), (64, 32, 1.25, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 130, # ON(vel=127) 18, # ON(vel=15) 2, # HOLD(62) 2, # REST 2, # REST 35, # ON(vel=32) 2, # HOLD(64) 2 # HOLD(64) ] self.assertEqual(expected_ids, ids) def testVocabSize(self): num_vel_bins = 12 encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute( steps_per_second=4, num_velocity_bins=num_vel_bins) expected = num_vel_bins + encoder.num_reserved_ids + 2 self.assertEqual(expected, encoder.vocab_size) class CompositeScoreEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.CompositeScoreEncoder([ music_encoders.TextChordsEncoder(steps_per_quarter=4), music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ]) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_chords_to_sequence(ns, [('C', 0.5), ('Dm', 1.0)]) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)]) chord_ids, melody_ids = zip(*encoder.encode_note_sequence(ns)) expected_chord_ids = [ 2, # no-chord 2, # no-chord 3, # C major 3, # C major 17, # D minor 17, # D minor 17, # D minor 17 # D minor ] expected_melody_ids = [ 43, # ON(60) 45, # ON(62) 2, # HOLD(62) 3, # OFF(62) 2, # REST 47, # ON(64) 2, # HOLD(64) 2 # HOLD(64) ] self.assertEqual(expected_chord_ids, list(chord_ids)) self.assertEqual(expected_melody_ids, list(melody_ids)) # TODO(iansimon): also test MusicXML encoding def testVocabSize(self): encoder = music_encoders.CompositeScoreEncoder([ music_encoders.TextChordsEncoder(steps_per_quarter=4), music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ]) self.assertEqual([51, 92], encoder.vocab_size) if __name__ == '__main__': tf.test.main()
33.497312
84
0.590001
from __future__ import absolute_import from __future__ import division from __future__ import print_function import tempfile import magenta from magenta.models.score2perf import music_encoders from magenta.music import testing_lib from magenta.music.protobuf import music_pb2 import tensorflow.compat.v1 as tf class MidiPerformanceEncoderTest(tf.test.TestCase): def testNumReservedIds(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) self.assertEqual(2, encoder.num_reserved_ids) def testEncodeEmptyNoteSequence(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) ids = encoder.encode_note_sequence(music_pb2.NoteSequence()) self.assertEqual([], ids) def testEncodeEmptyNoteSequenceAddEos(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, add_eos=True) ids = encoder.encode_note_sequence(music_pb2.NoteSequence()) self.assertEqual([1], ids) def testEncodeNoteSequence(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, 41, 45, 277, 309, 48, 277, 136, 277, 133, 277, 129 ] self.assertEqual(expected_ids, ids) def testEncodeNoteSequenceAddEos(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, add_eos=True) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, 41, 45, 277, 309, 48, 277, 136, 277, 133, 277, 129, 1 ] self.assertEqual(expected_ids, ids) def testEncodeNoteSequenceNGrams(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)]) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence( ns, 0, [(60, 100, 0.0, 4.0), (64, 100, 0.0, 3.0), (67, 127, 1.0, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 302, 310, 277, 312, 277, 136, 277, 133, 277, 129 ] self.assertEqual(expected_ids, ids) def testEncode(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(277, 129)]) ns = music_pb2.NoteSequence() testing_lib.add_track_to_sequence(ns, 0, [(60, 97, 0.0, 1.0)]) with tempfile.NamedTemporaryFile(suffix='.mid') as f: magenta.music.sequence_proto_to_midi_file(ns, f.name) ids = encoder.encode(f.name) expected_ids = [ 302, 41, 310 ] self.assertEqual(expected_ids, ids) def testDecode(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(277, 129)]) ids = [ 302, 41, 310 ] filename = encoder.decode(ids) ns = magenta.music.midi_file_to_sequence_proto(filename) del ns.tempos[:] del ns.time_signatures[:] expected_ns = music_pb2.NoteSequence(ticks_per_quarter=220) testing_lib.add_track_to_sequence(expected_ns, 0, [(60, 97, 0.0, 1.0)]) expected_ns.source_info.encoding_type = ( music_pb2.NoteSequence.SourceInfo.MIDI) expected_ns.source_info.parser = ( 
music_pb2.NoteSequence.SourceInfo.PRETTY_MIDI) self.assertEqual(expected_ns, ns) def testVocabSize(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108) self.assertEqual(310, encoder.vocab_size) def testVocabSizeNGrams(self): encoder = music_encoders.MidiPerformanceEncoder( steps_per_second=100, num_velocity_bins=32, min_pitch=21, max_pitch=108, ngrams=[(41, 45), (277, 309, 300), (309, 48), (277, 129, 130)]) self.assertEqual(314, encoder.vocab_size) class TextChordsEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_chords_to_sequence( ns, [('C', 1), ('Dm', 3), ('Bdim', 4)]) ns.total_time = 5.0 ids = encoder.encode_note_sequence(ns) expected_ids = [ 2, 3, 3, 17, 50 ] self.assertEqual(expected_ids, ids) def testEncode(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) ids = encoder.encode('C G Am F') expected_ids = [ 3, 10, 24, 8 ] self.assertEqual(expected_ids, ids) def testVocabSize(self): encoder = music_encoders.TextChordsEncoder(steps_per_quarter=1) self.assertEqual(51, encoder.vocab_size) class TextMelodyEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) encoder_absolute = music_encoders.TextMelodyEncoderAbsolute( steps_per_second=4, min_pitch=21, max_pitch=108) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)]) ids = encoder.encode_note_sequence(ns) ids_absolute = encoder_absolute.encode_note_sequence(ns) expected_ids = [ 43, 45, 2, 3, 2, 47, 2, 2 ] self.assertEqual(expected_ids, ids) self.assertEqual(expected_ids, ids_absolute) def testEncode(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ids = encoder.encode('60 -2 62 -1 64 -2') expected_ids = [ 43, 2, 45, 3, 47, 2 ] self.assertEqual(expected_ids, ids) def testVocabSize(self): encoder = music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) self.assertEqual(92, encoder.vocab_size) class FlattenedTextMelodyEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute( steps_per_second=4, num_velocity_bins=127) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 15, 0.25, 0.75), (64, 32, 1.25, 2.0)]) ids = encoder.encode_note_sequence(ns) expected_ids = [ 130, 18, 2, 2, 2, 35, 2, 2 ] self.assertEqual(expected_ids, ids) def testVocabSize(self): num_vel_bins = 12 encoder = music_encoders.FlattenedTextMelodyEncoderAbsolute( steps_per_second=4, num_velocity_bins=num_vel_bins) expected = num_vel_bins + encoder.num_reserved_ids + 2 self.assertEqual(expected, encoder.vocab_size) class CompositeScoreEncoderTest(tf.test.TestCase): def testEncodeNoteSequence(self): encoder = music_encoders.CompositeScoreEncoder([ music_encoders.TextChordsEncoder(steps_per_quarter=4), music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ]) ns = music_pb2.NoteSequence() ns.tempos.add(qpm=60) testing_lib.add_chords_to_sequence(ns, [('C', 0.5), ('Dm', 1.0)]) testing_lib.add_track_to_sequence( ns, 0, [(60, 127, 0.0, 0.25), (62, 127, 0.25, 0.75), (64, 127, 1.25, 2.0)]) 
chord_ids, melody_ids = zip(*encoder.encode_note_sequence(ns)) expected_chord_ids = [ 2, 2, 3, 3, 17, 17, 17, 17 ] expected_melody_ids = [ 43, 45, 2, 3, 2, 47, 2, 2 ] self.assertEqual(expected_chord_ids, list(chord_ids)) self.assertEqual(expected_melody_ids, list(melody_ids)) def testVocabSize(self): encoder = music_encoders.CompositeScoreEncoder([ music_encoders.TextChordsEncoder(steps_per_quarter=4), music_encoders.TextMelodyEncoder( steps_per_quarter=4, min_pitch=21, max_pitch=108) ]) self.assertEqual([51, 92], encoder.vocab_size) if __name__ == '__main__': tf.test.main()
true
true
f701420d7e91f4bcbdb6b47908438c20bcf682cc
73,843
py
Python
release/e2e.py
behnamh217rn21/ray-master
5fabd6c80a2dd7b8c259a98c149df430b16379b4
[ "Apache-2.0" ]
1
2021-12-21T17:57:21.000Z
2021-12-21T17:57:21.000Z
release/e2e.py
MaximumProgrammer/ray
1047914ee0ec27d0e7b8fc3599f8a0a573ba2dd7
[ "Apache-2.0" ]
10
2021-11-20T08:10:57.000Z
2022-03-19T07:11:59.000Z
release/e2e.py
MaximumProgrammer/ray
1047914ee0ec27d0e7b8fc3599f8a0a573ba2dd7
[ "Apache-2.0" ]
null
null
null
""" This is an end to end release test automation script used to kick off periodic release tests, running on Anyscale. The tool leverages app configs and compute templates. Calling this script will run a single release test. Example: python e2e.py --test-config ~/ray/release/xgboost_tests/xgboost_tests.yaml --test-name tune_small The following steps are then performed: 1. It will look up the test tune_small in the file xgboost_tests.yaml 2. It will fetch the specified app config and compute template and register those with anyscale (if they don’t exist yet) 3. It waits until the app config is built 4. It then kicks off the script defined in the run block 5. When the script is finished, it will fetch the latest logs, the full log output, and any artifacts specified in the artifacts block. 6. The full logs and artifacts will be stored in a s3 bucket 7. It will also fetch the json file specified in the run block as results. This is the file where you should write your metrics to. 8. All results are then stored in a database. Specifically it will store the following fields: - Timestamp - Test name - Status (finished, error, timeout, invalid) - Last logs (50 lines) - results (see above) - artifacts (links to s3 files) Then the script exits. If an error occurs at any time, a fail result is written to the database. Writing a new release test -------------------------- Each release test requires the following: 1. It has to be added in a release test yaml file, describing meta information about the test (e.g. name, command to run, timeout) 2. You need an app config yaml 3. You need a compute template yaml 4. You need to define a command to run. This is usually a python script. The command should accept (or ignore) a single optional `--smoke-test` argument. Usually the command should write its result metrics to a json file. The json filename is available in the TEST_OUTPUT_JSON env variable. 5. Add your test in release/.buildkite/build_pipeline.py. The script will have access to these environment variables: "RAY_ADDRESS": os.environ.get("RAY_ADDRESS", "auto") "TEST_OUTPUT_JSON": results_json_filename "IS_SMOKE_TEST": "1" if smoke_test else "0" For an example, take a look at the XGBoost test suite: https://github.com/ray-project/ray/blob/master/release/xgboost_tests/xgboost_tests.yaml These all use the same app configs and similar compute templates. This means that app configs can be re-used across runs and only have to be built ones. App configs and compute templates can interpret environment variables. A notable one is the `RAY_WHEELS` variable which points to the wheels that should be tested (e.g. latest master wheels). You might want to include something like this in your `post_build_cmds`: - pip3 install -U {{ env["RAY_WHEELS"] | default("ray") }} If you want to force rebuilds, consider using something like - echo {{ env["TIMESTAMP"] }} so that your app configs changes each time the script is executed. If you only want to trigger rebuilds once per day, use `DATESTAMP` instead: - echo {{ env["DATESTAMP"] }} Local testing ------------- For local testing, make sure to authenticate with the ray-ossci AWS user (e.g. by setting the respective environment variables obtained from go/aws), or use the `--no-report` command line argument. 
Also make sure to set these environment variables: - ANYSCALE_CLI_TOKEN (should contain your anyscale credential token) - ANYSCALE_PROJECT (should point to a project ID you have access to) A test can then be run like this: python e2e.py --no-report --test-config ~/ray/release/xgboost_tests/xgboost_tests.yaml --test-name tune_small The `--no-report` option disables storing the results in the DB and artifacts on S3. If you set this option, you do not need access to the ray-ossci AWS user. Using Compilation on Product + App Config Override -------------------------------------------------- For quick iteration when debugging a release test, go/compile-on-product allows you to easily modify and recompile Ray, such that the recompilation happens within an app build step and can benefit from a warm Bazel cache. See go/compile-on-product for more information. After kicking off the app build, you can give the app config ID to this script as an app config override, where the indicated app config will be used instead of the app config given in the test config. E.g., running python e2e.py --no-report --test-config ~/ray/benchmarks/benchmark_tests.yaml --test-name=single_node --app-config-id-override=apt_TBngEXXXrhipMXgexVcrpC9i would run the single_node benchmark test with the apt_TBngEXXXrhipMXgexVcrpC9i app config instead of the app config given in ~/ray/benchmarks/benchmark_tests.yaml. If the build for the app config is still in progress, the script will wait until it completes, same as for a locally defined app config. Running on Head Node vs Running with Anyscale Connect ----------------------------------------------------- By default release tests run their drivers on the head node. Support is being added to run release tests that execute the driver as a subprocess and run the workload on Anyscale product via Anyscale connect. Note that when the driver in the test is a subprocess of releaser, releaser cannot be terminated before the test finishes. Other known feature gaps when running with Anyscale connect: - Kicking off a test or checking progress is not supported. - Downloading / uploading logs and artifacts are unsupported. - Logs from remote may not have finished streaming, before the driver exits. Long running tests ------------------ Long running tests can be kicked off with by adding the --kick-off-only parameters to the e2e script. The status can then be checked with the --check command. Long running test sessions will be terminated after `timeout` seconds, after which the latest result in the TEST_OUTPUT_JSON will be reported. Thus, long running release tests should update this file periodically. There are also two config options to configure behavior. The `time_key` is needed to track the latest update of the TEST_OUTPUT_JSON and should contain a floating point number (usually `time.time()`). The `max_update_delay` then specified the maximum time in seconds that can be passed without an update to the results json. If the output file hasn't been updated in e.g. 60 seconds, this could indicate that the command is stale/frozen, and thus should fail. 
Release test yaml example ------------------------- - name: example owner: mail: "[email protected]" # Currently not used slack: "@tune-team" # Currentl not used cluster: app_config: app_config.yaml # Relative to the release test yaml compute_template: tpl_cpu.yaml run: timeout: 600 # in seconds prepare: python wait_cluster.py 4 600 # prepare cmd to run before test script: python workloads/train.py # actual release test command # Only needed for long running test time_key: last_update # Key in the results json indicating current time max_update_delay: 30 # If state hasn't been updated in 30s, terminate # This block is optional artifacts: # Artifact name: location on head node - detailed_output: detailed_output.csv # This block is optional. If present, the contents will be # deep updated for smoke testing smoke_test: cluster: compute_template: tpl_cpu_smoketest.yaml """ # noqa: E501 import argparse import boto3 import collections import copy import datetime import hashlib import jinja2 import json import logging import multiprocessing import os import requests import shutil import subprocess import sys import tempfile import time from queue import Empty from typing import Any, Dict, Optional, Tuple, List import yaml import anyscale import anyscale.conf from anyscale.api import instantiate_api_client from anyscale.controllers.session_controller import SessionController from anyscale.sdk.anyscale_client.sdk import AnyscaleSDK logger = logging.getLogger() logger.setLevel(logging.INFO) handler = logging.StreamHandler(stream=sys.stdout) formatter = logging.Formatter(fmt="[%(levelname)s %(asctime)s] " "%(filename)s: %(lineno)d " "%(message)s") handler.setFormatter(formatter) logger.addHandler(handler) def getenv_default(key: str, default: Optional[str] = None): """Return environment variable with default value""" # If the environment variable is set but "", still return default return os.environ.get(key, None) or default GLOBAL_CONFIG = { "ANYSCALE_USER": getenv_default("ANYSCALE_USER", "[email protected]"), "ANYSCALE_HOST": getenv_default("ANYSCALE_HOST", "https://beta.anyscale.com"), "ANYSCALE_CLI_TOKEN": getenv_default("ANYSCALE_CLI_TOKEN"), "ANYSCALE_CLOUD_ID": getenv_default( "ANYSCALE_CLOUD_ID", "cld_4F7k8814aZzGG8TNUGPKnc"), # cld_4F7k8814aZzGG8TNUGPKnc "ANYSCALE_PROJECT": getenv_default("ANYSCALE_PROJECT", ""), "RAY_VERSION": getenv_default("RAY_VERSION", "2.0.0.dev0"), "RAY_REPO": getenv_default("RAY_REPO", "https://github.com/ray-project/ray.git"), "RAY_BRANCH": getenv_default("RAY_BRANCH", "master"), "RELEASE_AWS_BUCKET": getenv_default("RELEASE_AWS_BUCKET", "ray-release-automation-results"), "RELEASE_AWS_LOCATION": getenv_default("RELEASE_AWS_LOCATION", "dev"), "RELEASE_AWS_DB_NAME": getenv_default("RELEASE_AWS_DB_NAME", "ray_ci"), "RELEASE_AWS_DB_TABLE": getenv_default("RELEASE_AWS_DB_TABLE", "release_test_result"), "RELEASE_AWS_DB_SECRET_ARN": getenv_default( "RELEASE_AWS_DB_SECRET_ARN", "arn:aws:secretsmanager:us-west-2:029272617770:secret:" "rds-db-credentials/cluster-7RB7EYTTBK2EUC3MMTONYRBJLE/ray_ci-MQN2hh", ), "RELEASE_AWS_DB_RESOURCE_ARN": getenv_default( "RELEASE_AWS_DB_RESOURCE_ARN", "arn:aws:rds:us-west-2:029272617770:cluster:ci-reporting", ), "RELEASE_RESULTS_DIR": getenv_default("RELEASE_RESULTS_DIR", "/tmp/ray_release_test_artifacts"), "DATESTAMP": str(datetime.datetime.now().strftime("%Y%m%d")), "TIMESTAMP": str(int(datetime.datetime.now().timestamp())), "EXPIRATION_1D": str((datetime.datetime.now() + datetime.timedelta(days=1)).strftime("%Y-%m-%d")), "EXPIRATION_2D": 
str((datetime.datetime.now() + datetime.timedelta(days=2)).strftime("%Y-%m-%d")), "EXPIRATION_3D": str((datetime.datetime.now() + datetime.timedelta(days=3)).strftime("%Y-%m-%d")), } REPORT_S = 30 RETRY_MULTIPLIER = 2 def exponential_backoff_retry(f, retry_exceptions, initial_retry_delay_s, max_retries): retry_cnt = 0 retry_delay_s = initial_retry_delay_s while True: try: return f() except retry_exceptions as e: retry_cnt += 1 if retry_cnt > max_retries: raise logger.info(f"Retry function call failed due to {e} " f"in {retry_delay_s} seconds...") time.sleep(retry_delay_s) retry_delay_s *= RETRY_MULTIPLIER def maybe_fetch_api_token(): if GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] is None: logger.info( "Missing ANYSCALE_CLI_TOKEN, retrieving from AWS secrets store") # NOTE(simon) This should automatically retrieve # [email protected]'s anyscale token GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] = boto3.client( "secretsmanager", region_name="us-west-2" ).get_secret_value( SecretId="arn:aws:secretsmanager:us-west-2:029272617770:secret:" "release-automation/" "anyscale-token20210505220406333800000001-BcUuKB")["SecretString"] class PrepareCommandRuntimeError(RuntimeError): pass class ReleaseTestTimeoutError(RuntimeError): pass class SessionTimeoutError(ReleaseTestTimeoutError): pass class FileSyncTimeoutError(ReleaseTestTimeoutError): pass class CommandTimeoutError(ReleaseTestTimeoutError): pass class PrepareCommandTimeoutError(ReleaseTestTimeoutError): pass # e.g., App config failure. class AppConfigBuildFailure(RuntimeError): pass class State: def __init__(self, state: str, timestamp: float, data: Any): self.state = state self.timestamp = timestamp self.data = data sys.path.insert(0, anyscale.ANYSCALE_RAY_DIR) def anyscale_project_url(project_id: str): return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \ f"/o/anyscale-internal/projects/{project_id}" \ f"/?tab=session-list" def anyscale_session_url(project_id: str, session_id: str): return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \ f"/o/anyscale-internal/projects/{project_id}" \ f"/clusters/{session_id}" def anyscale_compute_tpl_url(compute_tpl_id: str): return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \ f"/o/anyscale-internal/configurations/cluster-computes" \ f"/{compute_tpl_id}" def anyscale_app_config_build_url(build_id: str): return f"{GLOBAL_CONFIG['ANYSCALE_HOST']}" \ f"/o/anyscale-internal/configurations/app-config-details" \ f"/{build_id}" def wheel_url(ray_version, git_branch, git_commit): return f"https://s3-us-west-2.amazonaws.com/ray-wheels/" \ f"{git_branch}/{git_commit}/" \ f"ray-{ray_version}-cp37-cp37m-manylinux2014_x86_64.whl" def wheel_exists(ray_version, git_branch, git_commit): url = wheel_url(ray_version, git_branch, git_commit) return requests.head(url).status_code == 200 def get_latest_commits(repo: str, branch: str = "master") -> List[str]: cur = os.getcwd() with tempfile.TemporaryDirectory() as tmpdir: os.chdir(tmpdir) clone_cmd = [ "git", "clone", "--filter=tree:0", "--no-checkout", # "--single-branch", # "--depth=10", f"--branch={branch}", repo, tmpdir, ] log_cmd = [ "git", "log", "-n", "10", "--pretty=format:%H", ] subprocess.check_output(clone_cmd) commits = subprocess.check_output(log_cmd).decode( sys.stdout.encoding).split("\n") os.chdir(cur) return commits def find_ray_wheels(repo: str, branch: str, version: str): url = None commits = get_latest_commits(repo, branch) logger.info(f"Latest 10 commits for branch {branch}: {commits}") for commit in commits: if wheel_exists(version, branch, commit): url = wheel_url(version, branch, commit) 
os.environ["RAY_WHEELS"] = url os.environ["RAY_COMMIT"] = commit logger.info( f"Found wheels URL for Ray {version}, branch {branch}: " f"{url}") break return url def populate_wheels_sanity_check(commit: Optional[str] = None): if not commit: cmd = ("python -c 'import ray; print(" "\"No commit sanity check available, but this is the " "Ray wheel commit:\", ray.__commit__)'") else: cmd = (f"python -c 'import ray; " f"assert ray.__commit__ == \"{commit}\", ray.__commit__'") os.environ["RAY_WHEELS_SANITY_CHECK"] = cmd def _check_stop(stop_event: multiprocessing.Event, timeout_type: str): if stop_event.is_set(): if timeout_type == "prepare_command": raise PrepareCommandTimeoutError( "Process timed out in the prepare command stage.") if timeout_type == "command": raise CommandTimeoutError( "Process timed out while running a command.") elif timeout_type == "file_sync": raise FileSyncTimeoutError( "Process timed out while syncing files.") elif timeout_type == "session": raise SessionTimeoutError( "Process timed out while starting a session.") else: assert False, "Unexpected timeout type." def _deep_update(d, u): for k, v in u.items(): if isinstance(v, collections.abc.Mapping): d[k] = _deep_update(d.get(k, {}), v) else: d[k] = v return d def _dict_hash(dt: Dict[Any, Any]) -> str: json_str = json.dumps(dt, sort_keys=True, ensure_ascii=True) sha = hashlib.sha256() sha.update(json_str.encode()) return sha.hexdigest() def _load_config(local_dir: str, config_file: Optional[str]) -> Optional[Dict]: if not config_file: return None config_path = os.path.join(local_dir, config_file) with open(config_path, "rt") as f: # Todo: jinja2 render content = f.read() env = copy.deepcopy(os.environ) env.update(GLOBAL_CONFIG) content = jinja2.Template(content).render(env=env) return yaml.safe_load(content) def has_errored(result: Dict[Any, Any]) -> bool: return result.get("status", "invalid") != "finished" def report_result(test_suite: str, test_name: str, status: str, last_logs: str, results: Dict[Any, Any], artifacts: Dict[Any, Any], category: str): now = datetime.datetime.utcnow() rds_data_client = boto3.client("rds-data", region_name="us-west-2") schema = GLOBAL_CONFIG["RELEASE_AWS_DB_TABLE"] sql = ( f"INSERT INTO {schema} " f"(created_on, test_suite, test_name, status, last_logs, " f"results, artifacts, category) " f"VALUES (:created_on, :test_suite, :test_name, :status, :last_logs, " f":results, :artifacts, :category)") parameters = [{ "name": "created_on", "typeHint": "TIMESTAMP", "value": { "stringValue": now.strftime("%Y-%m-%d %H:%M:%S") }, }, { "name": "test_suite", "value": { "stringValue": test_suite } }, { "name": "test_name", "value": { "stringValue": test_name } }, { "name": "status", "value": { "stringValue": status } }, { "name": "last_logs", "value": { "stringValue": last_logs } }, { "name": "results", "typeHint": "JSON", "value": { "stringValue": json.dumps(results) }, }, { "name": "artifacts", "typeHint": "JSON", "value": { "stringValue": json.dumps(artifacts) }, }, { "name": "category", "value": { "stringValue": category } }] # Default boto3 call timeout is 45 seconds. 
retry_delay_s = 64 MAX_RDS_RETRY = 3 exponential_backoff_retry( lambda: rds_data_client.execute_statement( database=GLOBAL_CONFIG["RELEASE_AWS_DB_NAME"], parameters=parameters, secretArn=GLOBAL_CONFIG["RELEASE_AWS_DB_SECRET_ARN"], resourceArn=GLOBAL_CONFIG["RELEASE_AWS_DB_RESOURCE_ARN"], schema=schema, sql=sql), retry_exceptions=rds_data_client.exceptions.StatementTimeoutException, initial_retry_delay_s=retry_delay_s, max_retries=MAX_RDS_RETRY) logger.info("Result has been persisted to the databse") def log_results_and_artifacts(result: Dict): results = result.get("results", {}) if results: msg = "Observed the following results:\n\n" for key, val in results.items(): msg += f" {key} = {val}\n" else: msg = "Did not find any results." logger.info(msg) artifacts = result.get("artifacts", {}) if artifacts: msg = "Saved the following artifacts:\n\n" for key, val in artifacts.items(): msg += f" {key} = {val}\n" else: msg = "Did not find any artifacts." logger.info(msg) def _cleanup_session(sdk: AnyscaleSDK, session_id: str): if session_id: # Just trigger a request. No need to wait until session shutdown. sdk.terminate_session( session_id=session_id, terminate_session_options={}) def search_running_session(sdk: AnyscaleSDK, project_id: str, session_name: str) -> Optional[str]: session_id = None logger.info(f"Looking for existing session with name {session_name}") result = sdk.search_sessions( project_id=project_id, sessions_query=dict(name=dict(equals=session_name))) if len(result.results) > 0 and result.results[0].state == "Running": logger.info("Found existing session.") session_id = result.results[0].id return session_id def create_or_find_compute_template( sdk: AnyscaleSDK, project_id: str, compute_tpl: Dict[Any, Any], _repeat: bool = True) -> Tuple[Optional[str], Optional[str]]: compute_tpl_id = None compute_tpl_name = None if compute_tpl: # As of Anyscale 0.4.1, it is an error to use the same compute template # name within the same organization, between different projects. compute_tpl_name = f"{project_id}/compute/{_dict_hash(compute_tpl)}" logger.info(f"Tests uses compute template " f"with name {compute_tpl_name}. Looking up existing " f"templates.") paging_token = None while not compute_tpl_id: result = sdk.search_compute_templates( dict( project_id=project_id, name=dict(equals=compute_tpl_name), include_anonymous=True), paging_token=paging_token) paging_token = result.metadata.next_paging_token for res in result.results: if res.name == compute_tpl_name: compute_tpl_id = res.id logger.info( f"Template already exists with ID {compute_tpl_id}") break if not paging_token: break if not compute_tpl_id: logger.info(f"Compute template not found. " f"Creating with name {compute_tpl_name}.") try: result = sdk.create_compute_template( dict( name=compute_tpl_name, project_id=project_id, config=compute_tpl)) compute_tpl_id = result.result.id except Exception as e: if _repeat: logger.warning( f"Got exception when trying to create compute " f"template: {e}. 
Sleeping for 10 seconds and then " f"try again once...") time.sleep(10) return create_or_find_compute_template( sdk=sdk, project_id=project_id, compute_tpl=compute_tpl, _repeat=False) raise e logger.info(f"Compute template created with ID {compute_tpl_id}") return compute_tpl_id, compute_tpl_name def create_or_find_app_config( sdk: AnyscaleSDK, project_id: str, app_config: Dict[Any, Any], _repeat: bool = True) -> Tuple[Optional[str], Optional[str]]: app_config_id = None app_config_name = None if app_config: app_config_name = f"{project_id}-{_dict_hash(app_config)}" logger.info(f"Test uses an app config with hash {app_config_name}. " f"Looking up existing app configs with this name.") paging_token = None while not app_config_id: result = sdk.list_app_configs( project_id=project_id, count=50, paging_token=paging_token) paging_token = result.metadata.next_paging_token for res in result.results: if res.name == app_config_name: app_config_id = res.id logger.info( f"App config already exists with ID {app_config_id}") break if not paging_token or app_config_id: break if not app_config_id: logger.info("App config not found. Creating new one.") try: result = sdk.create_app_config( dict( name=app_config_name, project_id=project_id, config_json=app_config)) app_config_id = result.result.id except Exception as e: if _repeat: logger.warning( f"Got exception when trying to create app " f"config: {e}. Sleeping for 10 seconds and then " f"try again once...") time.sleep(10) return create_or_find_app_config( sdk=sdk, project_id=project_id, app_config=app_config, _repeat=False) raise e logger.info(f"App config created with ID {app_config_id}") return app_config_id, app_config_name def install_app_config_packages(app_config: Dict[Any, Any]): os.environ.update(app_config.get("env_vars", {})) packages = app_config["python"]["pip_packages"] for package in packages: subprocess.check_output(["pip", "install", "-U", package], text=True) def install_matching_ray(): wheel = os.environ.get("RAY_WHEELS", None) if not wheel: return assert "manylinux2014_x86_64" in wheel, wheel if sys.platform == "darwin": platform = "macosx_10_15_intel" elif sys.platform == "win32": platform = "win_amd64" else: platform = "manylinux2014_x86_64" wheel = wheel.replace("manylinux2014_x86_64", platform) subprocess.check_output(["pip", "uninstall", "-y", "ray"], text=True) subprocess.check_output(["pip", "install", "-U", wheel], text=True) def wait_for_build_or_raise(sdk: AnyscaleSDK, app_config_id: Optional[str]) -> Optional[str]: if not app_config_id: return None # Fetch build build_id = None last_status = None result = sdk.list_builds(app_config_id) for build in sorted(result.results, key=lambda b: b.created_at): build_id = build.id last_status = build.status if build.status == "failed": continue if build.status == "succeeded": logger.info(f"Link to app config build: " f"{anyscale_app_config_build_url(build_id)}") return build_id if last_status == "failed": raise AppConfigBuildFailure("App config build failed.") if not build_id: raise AppConfigBuildFailure("No build found for app config.") # Build found but not failed/finished yet completed = False start_wait = time.time() next_report = start_wait + REPORT_S logger.info(f"Waiting for build {build_id} to finish...") logger.info(f"Track progress here: " f"{anyscale_app_config_build_url(build_id)}") while not completed: now = time.time() if now > next_report: logger.info(f"... 
still waiting for build {build_id} to finish " f"({int(now - start_wait)} seconds) ...") next_report = next_report + REPORT_S result = sdk.get_build(build_id) build = result.result if build.status == "failed": raise AppConfigBuildFailure( f"App config build failed. Please see " f"{anyscale_app_config_build_url(build_id)} for details") if build.status == "succeeded": logger.info("Build succeeded.") return build_id completed = build.status not in ["in_progress", "pending"] if completed: raise AppConfigBuildFailure( f"Unknown build status: {build.status}. Please see " f"{anyscale_app_config_build_url(build_id)} for details") time.sleep(1) return build_id def run_job(cluster_name: str, compute_tpl_name: str, cluster_env_name: str, job_name: str, min_workers: str, script: str, script_args: List[str], env_vars: Dict[str, str], autosuspend: int) -> Tuple[int, str]: # Start cluster and job address = f"anyscale://{cluster_name}?autosuspend={autosuspend}" logger.info(f"Starting job {job_name} with Ray address: {address}") env = copy.deepcopy(os.environ) env.update(GLOBAL_CONFIG) env.update(env_vars) env["RAY_ADDRESS"] = address env["RAY_JOB_NAME"] = job_name env["RAY_RELEASE_MIN_WORKERS"] = str(min_workers) proc = subprocess.Popen( script.split(" ") + script_args, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) proc.stdout.reconfigure(line_buffering=True) logs = "" for line in proc.stdout: logs += line sys.stdout.write(line) proc.wait() return proc.returncode, logs def create_and_wait_for_session( sdk: AnyscaleSDK, stop_event: multiprocessing.Event, session_name: str, session_options: Dict[Any, Any], ) -> str: # Create session logger.info(f"Creating session {session_name}") result = sdk.create_session(session_options) session_id = result.result.id # Trigger session start logger.info(f"Starting session {session_name} ({session_id})") session_url = anyscale_session_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id) logger.info(f"Link to session: {session_url}") result = sdk.start_session(session_id, start_session_options={}) sop_id = result.result.id completed = result.result.completed # Wait for session logger.info(f"Waiting for session {session_name}...") start_wait = time.time() next_report = start_wait + REPORT_S while not completed: # Sleep 1 sec before next check. time.sleep(1) session_operation_response = sdk.get_session_operation( sop_id, _request_timeout=30) session_operation = session_operation_response.result completed = session_operation.completed _check_stop(stop_event, "session") now = time.time() if now > next_report: logger.info(f"... 
still waiting for session {session_name} " f"({int(now - start_wait)} seconds) ...") next_report = next_report + REPORT_S return session_id def run_session_command(sdk: AnyscaleSDK, session_id: str, cmd_to_run: str, result_queue: multiprocessing.Queue, env_vars: Dict[str, str], state_str: str = "CMD_RUN") -> Tuple[str, int]: full_cmd = " ".join(f"{k}={v}" for k, v in env_vars.items()) + " " + cmd_to_run logger.info(f"Running command in session {session_id}: \n" f"{full_cmd}") session_url = anyscale_session_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id) logger.info(f"Link to session: {session_url}") result_queue.put(State(state_str, time.time(), None)) result = sdk.create_session_command( dict(session_id=session_id, shell_command=full_cmd)) scd_id = result.result.id return scd_id, result def wait_for_session_command_to_complete(create_session_command_result, sdk: AnyscaleSDK, scd_id: str, stop_event: multiprocessing.Event, state_str: str = "CMD_RUN"): result = create_session_command_result completed = result.result.finished_at is not None start_wait = time.time() next_report = start_wait + REPORT_S while not completed: # Sleep 1 sec before next check. time.sleep(1) result = exponential_backoff_retry( lambda: sdk.get_session_command(session_command_id=scd_id), retry_exceptions=Exception, initial_retry_delay_s=10, max_retries=3) completed = result.result.finished_at if state_str == "CMD_RUN": _check_stop(stop_event, "command") elif state_str == "CMD_PREPARE": _check_stop(stop_event, "prepare_command") now = time.time() if now > next_report: logger.info(f"... still waiting for command to finish " f"({int(now - start_wait)} seconds) ...") next_report = next_report + REPORT_S status_code = result.result.status_code runtime = time.time() - start_wait if status_code != 0: if state_str == "CMD_RUN": raise RuntimeError( f"Command returned non-success status: {status_code}") elif state_str == "CMD_PREPARE": raise PrepareCommandRuntimeError( f"Prepare command returned non-success status: {status_code}") return status_code, runtime def get_command_logs(session_controller: SessionController, scd_id: str, lines: int = 50): result = exponential_backoff_retry( lambda: session_controller.api_client.get_execution_logs_api_v2_session_commands_session_command_id_execution_logs_get( # noqa: E501 session_command_id=scd_id, start_line=-1 * lines, end_line=0), retry_exceptions=Exception, initial_retry_delay_s=10, max_retries=3) return result.result.lines def get_remote_json_content( temp_dir: str, session_name: str, remote_file: Optional[str], session_controller: SessionController, ): if not remote_file: logger.warning("No remote file specified, returning empty dict") return {} local_target_file = os.path.join(temp_dir, ".tmp.json") session_controller.pull( session_name=session_name, source=remote_file, target=local_target_file) with open(local_target_file, "rt") as f: return json.load(f) def get_local_json_content(local_file: Optional[str], ): if not local_file: logger.warning("No local file specified, returning empty dict") return {} with open(local_file, "rt") as f: return json.load(f) def pull_artifacts_and_store_in_cloud( temp_dir: str, logs: str, session_name: str, test_name: str, artifacts: Optional[Dict[Any, Any]], session_controller: SessionController, ): output_log_file = os.path.join(temp_dir, "output.log") with open(output_log_file, "wt") as f: f.write(logs) bucket = GLOBAL_CONFIG["RELEASE_AWS_BUCKET"] location = f"{GLOBAL_CONFIG['RELEASE_AWS_LOCATION']}" \ 
f"/{session_name}/{test_name}" saved_artifacts = {} s3_client = boto3.client("s3") s3_client.upload_file(output_log_file, bucket, f"{location}/output.log") saved_artifacts["output.log"] = f"s3://{bucket}/{location}/output.log" # Download artifacts if artifacts: for name, remote_file in artifacts.items(): logger.info(f"Downloading artifact `{name}` from " f"{remote_file}") local_target_file = os.path.join(temp_dir, name) session_controller.pull( session_name=session_name, source=remote_file, target=local_target_file) # Upload artifacts to s3 s3_client.upload_file(local_target_file, bucket, f"{location}/{name}") saved_artifacts[name] = f"s3://{bucket}/{location}/{name}" return saved_artifacts def find_session_by_test_name( sdk: AnyscaleSDK, session_controller: SessionController, temp_dir: str, state_json: str, project_id: str, test_name: str, ) -> Optional[Tuple[str, str, Dict[Any, Any]]]: paging_token = None while True: # Will break if paging_token is None after first search result = sdk.search_sessions( project_id=project_id, sessions_query=dict( name=dict(contains=test_name), state_filter=["Running"], paging=dict(count=20, paging_token=paging_token))) for session in result.results: logger.info(f"Found sessions {session.name}") if not session.name.startswith(test_name): continue try: session_state = get_remote_json_content( temp_dir=temp_dir, session_name=session.name, remote_file=state_json, session_controller=session_controller) except Exception as exc: raise RuntimeError(f"Could not get remote json content " f"for session {session.name}") from exc if session_state.get("test_name") == test_name: return session.id, session.name, session_state session_token = result.metadata.next_paging_token if not session_token: return None def get_latest_running_command_id(sdk: AnyscaleSDK, session_id: str ) -> Tuple[Optional[str], Optional[bool]]: scd_id = None paging_token = None success = None while not scd_id: result = sdk.list_session_commands( session_id=session_id, paging_token=paging_token) paging_token = result.metadata.next_paging_token for cmd in result.results: if not scd_id: scd_id = cmd.id completed = cmd.finished_at is not None if completed: if success is None: success = True success = success and cmd.status_code == 0 if not completed: return cmd.id, None return scd_id, success or False def run_test_config( local_dir: str, project_id: str, test_name: str, test_config: Dict[Any, Any], commit_url: str, session_name: str = None, smoke_test: bool = False, no_terminate: bool = False, kick_off_only: bool = False, check_progress: bool = False, upload_artifacts: bool = True, keep_results_dir: bool = False, app_config_id_override: Optional[str] = None, ) -> Dict[Any, Any]: """ Returns: Dict with the following entries: status (str): One of [finished, error, timeout] command_link (str): Link to command (Anyscale web UI) last_logs (str): Last logs (excerpt) to send to owner artifacts (dict): Dict of artifacts Key: Name Value: S3 URL """ # Todo (mid-term): Support other cluster definitions # (not only cluster configs) cluster_config_rel_path = test_config["cluster"].get( "cluster_config", None) cluster_config = _load_config(local_dir, cluster_config_rel_path) app_config_rel_path = test_config["cluster"].get("app_config", None) app_config = _load_config(local_dir, app_config_rel_path) compute_tpl_rel_path = test_config["cluster"].get("compute_template", None) compute_tpl = _load_config(local_dir, compute_tpl_rel_path) stop_event = multiprocessing.Event() result_queue = multiprocessing.Queue() if not 
session_name: session_name = f"{test_name}_{int(time.time())}" temp_dir = tempfile.mkdtemp() # Result and state files results_json = test_config["run"].get("results", None) if results_json is None: results_json = "/tmp/release_test_out.json" state_json = test_config["run"].get("state", None) if state_json is None: state_json = "/tmp/release_test_state.json" env_vars = { "RAY_ADDRESS": os.environ.get("RAY_ADDRESS", "auto"), "TEST_OUTPUT_JSON": results_json, "TEST_STATE_JSON": state_json, "IS_SMOKE_TEST": "1" if smoke_test else "0", } with open(os.path.join(local_dir, ".anyscale.yaml"), "wt") as f: f.write(f"project_id: {project_id}") os.chdir(local_dir) # Setup interface # Unfortunately, there currently seems to be no great way to # transfer files with the Anyscale SDK. # So we use the session controller instead. sdk = AnyscaleSDK(auth_token=GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"]) session_controller = SessionController( api_client=instantiate_api_client( cli_token=GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"], host=GLOBAL_CONFIG["ANYSCALE_HOST"], ), anyscale_api_client=sdk.api_client, ) timeout = test_config["run"].get("timeout", 1800) if "RELEASE_OVERRIDE_TIMEOUT" in os.environ: previous_timeout = timeout timeout = int(os.environ.get("RELEASE_OVERRIDE_TIMEOUT", str(timeout))) logger.warning(f"Release test timeout override: {timeout} " f"(would have been {previous_timeout})") # If a test is long running, timeout does not mean it failed is_long_running = test_config["run"].get("long_running", False) build_id_override = None if test_config["run"].get("use_connect"): autosuspend_mins = test_config["run"].get("autosuspend_mins", 5) assert not kick_off_only, \ "Unsupported for running with Anyscale connect." if app_config_id_override is not None: logger.info( "Using connect and an app config override, waiting until " "build finishes so we can fetch the app config in order to " "install its pip packages locally.") build_id_override = wait_for_build_or_raise( sdk, app_config_id_override) response = sdk.get_cluster_environment_build(build_id_override) app_config = response.result.config_json install_app_config_packages(app_config) install_matching_ray() elif "autosuspend_mins" in test_config["run"]: raise ValueError( "'autosuspend_mins' is only supported if 'use_connect' is True.") # Add information to results dict def _update_results(results: Dict): if "last_update" in results: results["last_update_diff"] = time.time() - results["last_update"] if smoke_test: results["smoke_test"] = True def _process_finished_command(session_controller: SessionController, scd_id: str, results: Optional[Dict] = None, runtime: int = None, commit_url: str = None, session_url: str = None): logger.info("Command finished successfully.") if results_json: results = results or get_remote_json_content( temp_dir=temp_dir, session_name=session_name, remote_file=results_json, session_controller=session_controller, ) else: results = {"passed": 1} _update_results(results) if scd_id: logs = get_command_logs(session_controller, scd_id, test_config.get("log_lines", 50)) else: logs = "No command found to fetch logs for" if upload_artifacts: saved_artifacts = pull_artifacts_and_store_in_cloud( temp_dir=temp_dir, logs=logs, # Also save logs in cloud session_name=session_name, test_name=test_name, artifacts=test_config.get("artifacts", {}), session_controller=session_controller, ) logger.info("Fetched results and stored on the cloud. 
Returning.") else: saved_artifacts = {} logger.info("Usually I would have fetched the results and " "artifacts and stored them on S3.") # Add these metadata here to avoid changing SQL schema. results["_runtime"] = runtime results["_session_url"] = session_url results["_commit_url"] = commit_url results["_stable"] = test_config.get("stable", True) result_queue.put( State( "END", time.time(), { "status": "finished", "last_logs": logs, "results": results, "artifacts": saved_artifacts, }, )) # When running the test script in client mode, the finish command is a # completed local process. def _process_finished_client_command(returncode: int, logs: str): if upload_artifacts: saved_artifacts = pull_artifacts_and_store_in_cloud( temp_dir=temp_dir, logs=logs, # Also save logs in cloud session_name=session_name, test_name=test_name, artifacts=None, session_controller=None, ) logger.info("Stored results on the cloud. Returning.") else: saved_artifacts = {} logger.info("Usually I would have fetched the results and " "artifacts and stored them on S3.") if results_json: results = get_local_json_content(local_file=results_json, ) else: results = { "passed": int(returncode == 0), } results["returncode"] = returncode _update_results(results) result_queue.put( State( "END", time.time(), { "status": "finished", "last_logs": logs, "results": results, "artifacts": saved_artifacts, }, )) def _run(logger): # These values will be set as the test runs. session_url = None runtime = None anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] test_uses_ray_connect = test_config["run"].get("use_connect") session_id = None scd_id = None try: # First, look for running sessions session_id = search_running_session(sdk, project_id, session_name) compute_tpl_name = None app_config_id = app_config_id_override app_config_name = None build_id = build_id_override if not session_id: logger.info("No session found.") # Start session session_options = dict( name=session_name, project_id=project_id) if cluster_config is not None: logging.info("Starting session with cluster config") cluster_config_str = json.dumps(cluster_config) session_options["cluster_config"] = cluster_config_str session_options["cloud_id"] = ( GLOBAL_CONFIG["ANYSCALE_CLOUD_ID"], ) session_options["uses_app_config"] = False else: logging.info("Starting session with app/compute config") # Find/create compute template compute_tpl_id, compute_tpl_name = \ create_or_find_compute_template( sdk, project_id, compute_tpl) logger.info(f"Link to compute template: " f"{anyscale_compute_tpl_url(compute_tpl_id)}") # Find/create app config if app_config_id is None: ( app_config_id, app_config_name, ) = create_or_find_app_config(sdk, project_id, app_config) else: logger.info( f"Using override app config {app_config_id}") app_config_name = sdk.get_app_config( app_config_id).result.name if build_id is None: # We might have already retrieved the build ID when # installing app config packages locally if using # connect, so only get the build ID if it's not set. 
build_id = wait_for_build_or_raise(sdk, app_config_id) session_options["compute_template_id"] = compute_tpl_id session_options["build_id"] = build_id session_options["uses_app_config"] = True # Start session session_id = create_and_wait_for_session( sdk=sdk, stop_event=stop_event, session_name=session_name, session_options=session_options, ) prepare_command = test_config["run"].get("prepare") # Write test state json test_state_file = os.path.join(local_dir, "test_state.json") with open(test_state_file, "wt") as f: json.dump({ "start_time": time.time(), "test_name": test_name }, f) if prepare_command or not test_uses_ray_connect: if test_uses_ray_connect: logger.info("Found a prepare command, so pushing it " "to the session.") # Rsync up logger.info("Syncing files to session...") session_controller.push( session_name=session_name, source=None, target=None, config=None, all_nodes=False, ) logger.info("Syncing test state to session...") session_controller.push( session_name=session_name, source=test_state_file, target=state_json, config=None, all_nodes=False, ) session_url = anyscale_session_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id) _check_stop(stop_event, "file_sync") # Optionally run preparation command if prepare_command: logger.info( f"Running preparation command: {prepare_command}") scd_id, result = run_session_command( sdk=sdk, session_id=session_id, cmd_to_run=prepare_command, result_queue=result_queue, env_vars=env_vars, state_str="CMD_PREPARE") _, _ = wait_for_session_command_to_complete( result, sdk=sdk, scd_id=scd_id, stop_event=stop_event, state_str="CMD_PREPARE") if test_uses_ray_connect: script_args = test_config["run"].get("args", []) if smoke_test: script_args += ["--smoke-test"] min_workers = 0 for node_type in compute_tpl["worker_node_types"]: min_workers += node_type["min_workers"] # Build completed, use job timeout result_queue.put(State("CMD_RUN", time.time(), None)) returncode, logs = run_job( cluster_name=session_name, compute_tpl_name=compute_tpl_name, cluster_env_name=app_config_name, job_name=session_name, min_workers=min_workers, script=test_config["run"]["script"], script_args=script_args, env_vars=env_vars, autosuspend=autosuspend_mins) _process_finished_client_command(returncode, logs) return # Run release test command cmd_to_run = test_config["run"]["script"] + " " args = test_config["run"].get("args", []) if args: cmd_to_run += " ".join(args) + " " if smoke_test: cmd_to_run += " --smoke-test" scd_id, result = run_session_command( sdk=sdk, session_id=session_id, cmd_to_run=cmd_to_run, result_queue=result_queue, env_vars=env_vars, state_str="CMD_RUN") if not kick_off_only: _, runtime = wait_for_session_command_to_complete( result, sdk=sdk, scd_id=scd_id, stop_event=stop_event, state_str="CMD_RUN") _process_finished_command( session_controller=session_controller, scd_id=scd_id, runtime=runtime, session_url=session_url, commit_url=commit_url) else: result_queue.put( State("END", time.time(), { "status": "kickoff", "last_logs": "" })) except (ReleaseTestTimeoutError, Exception) as e: logger.error(e, exc_info=True) logs = str(e) if scd_id is not None: try: logs = logs + "; Command logs:" + get_command_logs( session_controller, scd_id, test_config.get("log_lines", 50)) except Exception as e2: logger.error(e2, exc_info=True) # Long running tests are "finished" successfully when # timed out if isinstance(e, ReleaseTestTimeoutError) and is_long_running: _process_finished_command( session_controller=session_controller, scd_id=scd_id) else: 
timeout_type = "" runtime = None if isinstance(e, CommandTimeoutError): timeout_type = "timeout" runtime = 0 elif (isinstance(e, PrepareCommandTimeoutError) or isinstance(e, FileSyncTimeoutError) or isinstance(e, SessionTimeoutError) or isinstance(e, PrepareCommandRuntimeError) or isinstance(e, AppConfigBuildFailure)): timeout_type = "infra_timeout" runtime = None elif isinstance(e, RuntimeError): timeout_type = "runtime_error" runtime = 0 else: timeout_type = "unknown timeout" runtime = None # Add these metadata here to avoid changing SQL schema. results = {} results["_runtime"] = runtime results["_session_url"] = session_url results["_commit_url"] = commit_url results["_stable"] = test_config.get("stable", True) result_queue.put( State( "END", time.time(), { "status": timeout_type, "last_logs": logs, "results": results })) finally: if no_terminate: logger.warning( "`no_terminate` is set to True, so the session will " "*not* be terminated!") else: _cleanup_session(sdk, session_id) def _check_progress(logger): anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] should_terminate = False session_id = None scd_id = None try: existing_session = find_session_by_test_name( sdk=sdk, session_controller=session_controller, temp_dir=temp_dir, state_json=state_json, project_id=project_id, test_name=test_name) if existing_session is None: logger.info(f"Found no existing session for {test_name}") result_queue.put( State("END", time.time(), { "status": "nosession", "last_logs": "" })) return session_id, session_name, session_state = existing_session logger.info(f"Found existing session for {test_name}: " f"{session_name}") scd_id, success = get_latest_running_command_id( sdk=sdk, session_id=session_id) latest_result = get_remote_json_content( temp_dir=temp_dir, session_name=session_name, remote_file=results_json, session_controller=session_controller, ) # Fetch result json and check if it has been updated recently result_time_key = test_config["run"].get("time_key", None) maximum_update_delay = test_config["run"].get( "max_update_delay", None) if result_time_key and maximum_update_delay: last_update = latest_result.get(result_time_key, None) if not last_update: result_queue.put( State( "END", time.time(), { "status": "error", "last_logs": f"Test did not store " f"{result_time_key} in the " f"results json." })) return delay = time.time() - last_update logger.info(f"Last update was at {last_update:.2f}. 
" f"This was {delay:.2f} seconds ago " f"(maximum allowed: {maximum_update_delay})") if delay > maximum_update_delay: raise RuntimeError( f"Test did not update the results json within " f"the last {maximum_update_delay} seconds.") if time.time() - session_state["start_time"] > timeout: # Long running test reached timeout logger.info( f"Test command reached timeout after {timeout} seconds") _process_finished_command( session_controller=session_controller, scd_id=scd_id, results=latest_result) should_terminate = True elif success: logger.info("All commands finished.") _process_finished_command( session_controller=session_controller, scd_id=scd_id, results=latest_result) should_terminate = True else: rest_time = timeout - time.time() + session_state["start_time"] logger.info(f"Test command should continue running " f"for {rest_time} seconds") result_queue.put( State("END", time.time(), { "status": "kickoff", "last_logs": "Test is still running" })) except Exception as e: logger.error(e, exc_info=True) logs = str(e) if scd_id is not None: try: logs = get_command_logs(session_controller, scd_id, test_config.get("log_lines", 50)) logs += f"\n{str(e)}" except Exception as e2: logger.error(e2, exc_info=True) result_queue.put( State("END", time.time(), { "status": "error", "last_logs": logs })) should_terminate = True finally: if should_terminate: logger.warning("Terminating session") _cleanup_session(sdk, session_id) if not check_progress: process = multiprocessing.Process(target=_run, args=(logger, )) else: process = multiprocessing.Process( target=_check_progress, args=(logger, )) build_timeout = test_config["run"].get("build_timeout", 1800) project_url = anyscale_project_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"]) logger.info(f"Link to project: {project_url}") msg = f"This will now run test {test_name}." if smoke_test: msg += " This is a smoke test." if is_long_running: msg += " This is a long running test." logger.info(msg) logger.info(f"Starting process with timeout {timeout} " f"(build timeout {build_timeout})") process.start() # The timeout time will be updated after the build finished # Build = App config + compute template build and session start timeout_time = time.time() + build_timeout result = {} while process.is_alive(): try: state: State = result_queue.get(timeout=1) except (Empty, TimeoutError): if time.time() > timeout_time: stop_event.set() logger.warning("Process timed out.") if not is_long_running: logger.warning("Terminating process in 10 seconds.") time.sleep(10) logger.warning("Terminating process now.") process.terminate() else: logger.info("Process is long running. 
Give 2 minutes to " "fetch result and terminate.") start_terminate = time.time() while time.time( ) < start_terminate + 120 and process.is_alive(): time.sleep(1) if process.is_alive(): logger.warning("Terminating forcefully now.") process.terminate() else: logger.info("Long running results collected.") break continue if not isinstance(state, State): raise RuntimeError(f"Expected `State` object, got {result}") if state.state == "CMD_PREPARE": # Reset timeout after build finished timeout_time = state.timestamp + timeout if state.state == "CMD_RUN": # Reset timeout after prepare command or build finished timeout_time = state.timestamp + timeout elif state.state == "END": result = state.data break while not result_queue.empty(): state = result_queue.get_nowait() result = state.data logger.info("Final check if everything worked.") try: result.setdefault("status", "error (status not found)") except (TimeoutError, Empty): result = {"status": "timeout", "last_logs": "Test timed out."} logger.info(f"Final results: {result}") log_results_and_artifacts(result) if not keep_results_dir: logger.info(f"Removing results dir {temp_dir}") shutil.rmtree(temp_dir) else: # Write results.json with open(os.path.join(temp_dir, "results.json"), "wt") as fp: json.dump(result, fp) out_dir = os.path.expanduser(GLOBAL_CONFIG["RELEASE_RESULTS_DIR"]) logger.info(f"Moving results dir {temp_dir} to persistent location " f"{out_dir}") shutil.rmtree(out_dir, ignore_errors=True) shutil.copytree(temp_dir, out_dir) logger.info(f"Dir contents: {os.listdir(out_dir)}") return result def run_test(test_config_file: str, test_name: str, project_id: str, commit_url: str, category: str = "unspecified", smoke_test: bool = False, no_terminate: bool = False, kick_off_only: bool = False, check_progress: bool = False, report: bool = True, keep_results_dir: bool = False, session_name: Optional[str] = None, app_config_id_override=None) -> Dict[str, Any]: with open(test_config_file, "rt") as f: test_configs = yaml.safe_load(f) test_config_dict = {} for test_config in test_configs: name = test_config.pop("name") test_config_dict[name] = test_config if test_name not in test_config_dict: raise ValueError( f"Test with name `{test_name}` not found in test config file " f"at `{test_config_file}`.") test_config = test_config_dict[test_name] if smoke_test and "smoke_test" in test_config: smoke_test_config = test_config.pop("smoke_test") test_config = _deep_update(test_config, smoke_test_config) local_dir = os.path.dirname(test_config_file) if "local_dir" in test_config: # local_dir is relative to test_config_file local_dir = os.path.join(local_dir, test_config["local_dir"]) if test_config["run"].get("use_connect"): assert not kick_off_only, \ "--kick-off-only is unsupported when running with " \ "Anyscale connect." assert not check_progress, \ "--check is unsupported when running with Anyscale connect." if test_config.get("artifacts", {}): logger.error( "Saving artifacts are not yet supported when running with " "Anyscale connect.") result = run_test_config( local_dir, project_id, test_name, test_config, commit_url, session_name=session_name, smoke_test=smoke_test, no_terminate=no_terminate, kick_off_only=kick_off_only, check_progress=check_progress, upload_artifacts=report, keep_results_dir=keep_results_dir, app_config_id_override=app_config_id_override) status = result.get("status", "invalid") if kick_off_only: if status != "kickoff": raise RuntimeError("Error kicking off test.") logger.info("Kicked off test. 
It's now up to the `--check` " "part of the script to track its process.") return {} else: # `--check` or no kick off only if status == "nosession": logger.info(f"No running session found for test {test_name}, so " f"assuming everything is fine.") return {} if status == "kickoff": logger.info(f"Test {test_name} is still running.") return {} last_logs = result.get("last_logs", "No logs.") test_suite = os.path.basename(test_config_file).replace(".yaml", "") report_kwargs = dict( test_suite=test_suite, test_name=test_name, status=status, last_logs=last_logs, results=result.get("results", {}), artifacts=result.get("artifacts", {}), category=category, ) if report: report_result(**report_kwargs) else: logger.info(f"Usually I would now report the following results:\n" f"{report_kwargs}") if has_errored(result): raise RuntimeError(last_logs) return report_kwargs if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument( "--test-config", type=str, required=True, help="Test config file") parser.add_argument("--test-name", type=str, help="Test name in config") parser.add_argument( "--ray-wheels", required=False, type=str, help="URL to ray wheels") parser.add_argument( "--no-terminate", action="store_true", default=False, help="Don't terminate session after failure") parser.add_argument( "--no-report", action="store_true", default=False, help="Do not report any results or upload to S3") parser.add_argument( "--kick-off-only", action="store_true", default=False, help="Kick off only (don't wait for command to finish)") parser.add_argument( "--check", action="store_true", default=False, help="Check (long running) status") parser.add_argument( "--keep-results-dir", action="store_true", default=False, help="Keep results in directory (named RELEASE_RESULTS_DIR), e.g. " "for Buildkite artifact upload.") parser.add_argument( "--category", type=str, default="unspecified", help="Category name, e.g. 
`release-1.3.0` (will be saved in database)") parser.add_argument( "--smoke-test", action="store_true", help="Finish quickly for testing") parser.add_argument( "--session-name", required=False, type=str, help="Name of the session to run this test.") parser.add_argument( "--app-config-id-override", required=False, type=str, help=("An app config ID, which will override the test config app " "config.")) args, _ = parser.parse_known_args() if not GLOBAL_CONFIG["ANYSCALE_PROJECT"]: raise RuntimeError( "You have to set the ANYSCALE_PROJECT environment variable!") maybe_fetch_api_token() if args.ray_wheels: os.environ["RAY_WHEELS"] = str(args.ray_wheels) url = str(args.ray_wheels) elif not args.check and not os.environ.get("RAY_WHEELS"): url = find_ray_wheels( GLOBAL_CONFIG["RAY_REPO"], GLOBAL_CONFIG["RAY_BRANCH"], GLOBAL_CONFIG["RAY_VERSION"], ) if not url: raise RuntimeError(f"Could not find wheels for " f"Ray {GLOBAL_CONFIG['RAY_VERSION']}, " f"branch {GLOBAL_CONFIG['RAY_BRANCH']}") # RAY_COMMIT is set by find_ray_wheels elif os.environ.get("RAY_WHEELS"): logger.info(f"Using Ray wheels provided from URL: " f"{os.environ.get('RAY_WHEELS')}") url = os.environ.get("RAY_WHEELS") populate_wheels_sanity_check(os.environ.get("RAY_COMMIT", "")) test_config_file = os.path.abspath(os.path.expanduser(args.test_config)) result_dict = run_test( test_config_file=test_config_file, test_name=args.test_name, project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], commit_url=url, category=args.category, smoke_test=args.smoke_test, no_terminate=args.no_terminate or args.kick_off_only, kick_off_only=args.kick_off_only, check_progress=args.check, report=not args.no_report, session_name=args.session_name, keep_results_dir=args.keep_results_dir, app_config_id_override=args.app_config_id_override, ) if result_dict: # If we get a result dict, check if any alerts should be raised from alert import SUITE_TO_FN, default_handle_result logger.info("Checking if results are valid...") handle_result_kwargs = result_dict.copy() handle_result_kwargs["created_on"] = None test_suite = handle_result_kwargs.get("test_suite", None) test_name = handle_result_kwargs.get("test_name", None) category = handle_result_kwargs.get("category", None) handle_fn = SUITE_TO_FN.get(test_suite, None) if not handle_fn: logger.warning(f"No handle for suite {test_suite}") alert = default_handle_result(**handle_result_kwargs) else: alert = handle_fn(**handle_result_kwargs) if alert: # If we get an alert, the test failed. raise RuntimeError(alert) else: logger.info(f"No alert raised for test {test_suite}/{test_name} " f"({category}) - the test successfully passed!")
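# A minimal sketch (not part of the original tooling) of driving run_test()
# programmatically instead of through the argparse entry point above. The
# config file path and test name below are hypothetical placeholders; this
# assumes ANYSCALE_PROJECT and ANYSCALE_CLI_TOKEN are configured (see
# maybe_fetch_api_token()) and that wheels for the configured branch can be
# resolved by find_ray_wheels().
def _example_programmatic_run():  # illustration only; never called
    maybe_fetch_api_token()
    url = find_ray_wheels(
        GLOBAL_CONFIG["RAY_REPO"],
        GLOBAL_CONFIG["RAY_BRANCH"],
        GLOBAL_CONFIG["RAY_VERSION"],
    )
    if not url:
        raise RuntimeError("No wheels found for the configured branch.")
    populate_wheels_sanity_check(os.environ.get("RAY_COMMIT", ""))
    return run_test(
        test_config_file="nightly_tests.yaml",  # hypothetical config file
        test_name="example_test",               # hypothetical test name
        project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"],
        commit_url=url,
        category="nightly",
        smoke_test=True,
    )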
Returning.") else: saved_artifacts = {} logger.info("Usually I would have fetched the results and " "artifacts and stored them on S3.") if results_json: results = get_local_json_content(local_file=results_json, ) else: results = { "passed": int(returncode == 0), } results["returncode"] = returncode _update_results(results) result_queue.put( State( "END", time.time(), { "status": "finished", "last_logs": logs, "results": results, "artifacts": saved_artifacts, }, )) def _run(logger): # These values will be set as the test runs. session_url = None runtime = None anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] test_uses_ray_connect = test_config["run"].get("use_connect") session_id = None scd_id = None try: # First, look for running sessions session_id = search_running_session(sdk, project_id, session_name) compute_tpl_name = None app_config_id = app_config_id_override app_config_name = None build_id = build_id_override if not session_id: logger.info("No session found.") # Start session session_options = dict( name=session_name, project_id=project_id) if cluster_config is not None: logging.info("Starting session with cluster config") cluster_config_str = json.dumps(cluster_config) session_options["cluster_config"] = cluster_config_str session_options["cloud_id"] = ( GLOBAL_CONFIG["ANYSCALE_CLOUD_ID"], ) session_options["uses_app_config"] = False else: logging.info("Starting session with app/compute config") # Find/create compute template compute_tpl_id, compute_tpl_name = \ create_or_find_compute_template( sdk, project_id, compute_tpl) logger.info(f"Link to compute template: " f"{anyscale_compute_tpl_url(compute_tpl_id)}") # Find/create app config if app_config_id is None: ( app_config_id, app_config_name, ) = create_or_find_app_config(sdk, project_id, app_config) else: logger.info( f"Using override app config {app_config_id}") app_config_name = sdk.get_app_config( app_config_id).result.name if build_id is None: # We might have already retrieved the build ID when # installing app config packages locally if using # connect, so only get the build ID if it's not set. 
build_id = wait_for_build_or_raise(sdk, app_config_id) session_options["compute_template_id"] = compute_tpl_id session_options["build_id"] = build_id session_options["uses_app_config"] = True session_id = create_and_wait_for_session( sdk=sdk, stop_event=stop_event, session_name=session_name, session_options=session_options, ) prepare_command = test_config["run"].get("prepare") test_state_file = os.path.join(local_dir, "test_state.json") with open(test_state_file, "wt") as f: json.dump({ "start_time": time.time(), "test_name": test_name }, f) if prepare_command or not test_uses_ray_connect: if test_uses_ray_connect: logger.info("Found a prepare command, so pushing it " "to the session.") logger.info("Syncing files to session...") session_controller.push( session_name=session_name, source=None, target=None, config=None, all_nodes=False, ) logger.info("Syncing test state to session...") session_controller.push( session_name=session_name, source=test_state_file, target=state_json, config=None, all_nodes=False, ) session_url = anyscale_session_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], session_id=session_id) _check_stop(stop_event, "file_sync") if prepare_command: logger.info( f"Running preparation command: {prepare_command}") scd_id, result = run_session_command( sdk=sdk, session_id=session_id, cmd_to_run=prepare_command, result_queue=result_queue, env_vars=env_vars, state_str="CMD_PREPARE") _, _ = wait_for_session_command_to_complete( result, sdk=sdk, scd_id=scd_id, stop_event=stop_event, state_str="CMD_PREPARE") if test_uses_ray_connect: script_args = test_config["run"].get("args", []) if smoke_test: script_args += ["--smoke-test"] min_workers = 0 for node_type in compute_tpl["worker_node_types"]: min_workers += node_type["min_workers"] result_queue.put(State("CMD_RUN", time.time(), None)) returncode, logs = run_job( cluster_name=session_name, compute_tpl_name=compute_tpl_name, cluster_env_name=app_config_name, job_name=session_name, min_workers=min_workers, script=test_config["run"]["script"], script_args=script_args, env_vars=env_vars, autosuspend=autosuspend_mins) _process_finished_client_command(returncode, logs) return cmd_to_run = test_config["run"]["script"] + " " args = test_config["run"].get("args", []) if args: cmd_to_run += " ".join(args) + " " if smoke_test: cmd_to_run += " --smoke-test" scd_id, result = run_session_command( sdk=sdk, session_id=session_id, cmd_to_run=cmd_to_run, result_queue=result_queue, env_vars=env_vars, state_str="CMD_RUN") if not kick_off_only: _, runtime = wait_for_session_command_to_complete( result, sdk=sdk, scd_id=scd_id, stop_event=stop_event, state_str="CMD_RUN") _process_finished_command( session_controller=session_controller, scd_id=scd_id, runtime=runtime, session_url=session_url, commit_url=commit_url) else: result_queue.put( State("END", time.time(), { "status": "kickoff", "last_logs": "" })) except (ReleaseTestTimeoutError, Exception) as e: logger.error(e, exc_info=True) logs = str(e) if scd_id is not None: try: logs = logs + "; Command logs:" + get_command_logs( session_controller, scd_id, test_config.get("log_lines", 50)) except Exception as e2: logger.error(e2, exc_info=True) if isinstance(e, ReleaseTestTimeoutError) and is_long_running: _process_finished_command( session_controller=session_controller, scd_id=scd_id) else: timeout_type = "" runtime = None if isinstance(e, CommandTimeoutError): timeout_type = "timeout" runtime = 0 elif (isinstance(e, PrepareCommandTimeoutError) or isinstance(e, FileSyncTimeoutError) or isinstance(e, 
SessionTimeoutError) or isinstance(e, PrepareCommandRuntimeError) or isinstance(e, AppConfigBuildFailure)): timeout_type = "infra_timeout" runtime = None elif isinstance(e, RuntimeError): timeout_type = "runtime_error" runtime = 0 else: timeout_type = "unknown timeout" runtime = None results = {} results["_runtime"] = runtime results["_session_url"] = session_url results["_commit_url"] = commit_url results["_stable"] = test_config.get("stable", True) result_queue.put( State( "END", time.time(), { "status": timeout_type, "last_logs": logs, "results": results })) finally: if no_terminate: logger.warning( "`no_terminate` is set to True, so the session will " "*not* be terminated!") else: _cleanup_session(sdk, session_id) def _check_progress(logger): anyscale.conf.CLI_TOKEN = GLOBAL_CONFIG["ANYSCALE_CLI_TOKEN"] should_terminate = False session_id = None scd_id = None try: existing_session = find_session_by_test_name( sdk=sdk, session_controller=session_controller, temp_dir=temp_dir, state_json=state_json, project_id=project_id, test_name=test_name) if existing_session is None: logger.info(f"Found no existing session for {test_name}") result_queue.put( State("END", time.time(), { "status": "nosession", "last_logs": "" })) return session_id, session_name, session_state = existing_session logger.info(f"Found existing session for {test_name}: " f"{session_name}") scd_id, success = get_latest_running_command_id( sdk=sdk, session_id=session_id) latest_result = get_remote_json_content( temp_dir=temp_dir, session_name=session_name, remote_file=results_json, session_controller=session_controller, ) result_time_key = test_config["run"].get("time_key", None) maximum_update_delay = test_config["run"].get( "max_update_delay", None) if result_time_key and maximum_update_delay: last_update = latest_result.get(result_time_key, None) if not last_update: result_queue.put( State( "END", time.time(), { "status": "error", "last_logs": f"Test did not store " f"{result_time_key} in the " f"results json." })) return delay = time.time() - last_update logger.info(f"Last update was at {last_update:.2f}. 
" f"This was {delay:.2f} seconds ago " f"(maximum allowed: {maximum_update_delay})") if delay > maximum_update_delay: raise RuntimeError( f"Test did not update the results json within " f"the last {maximum_update_delay} seconds.") if time.time() - session_state["start_time"] > timeout: logger.info( f"Test command reached timeout after {timeout} seconds") _process_finished_command( session_controller=session_controller, scd_id=scd_id, results=latest_result) should_terminate = True elif success: logger.info("All commands finished.") _process_finished_command( session_controller=session_controller, scd_id=scd_id, results=latest_result) should_terminate = True else: rest_time = timeout - time.time() + session_state["start_time"] logger.info(f"Test command should continue running " f"for {rest_time} seconds") result_queue.put( State("END", time.time(), { "status": "kickoff", "last_logs": "Test is still running" })) except Exception as e: logger.error(e, exc_info=True) logs = str(e) if scd_id is not None: try: logs = get_command_logs(session_controller, scd_id, test_config.get("log_lines", 50)) logs += f"\n{str(e)}" except Exception as e2: logger.error(e2, exc_info=True) result_queue.put( State("END", time.time(), { "status": "error", "last_logs": logs })) should_terminate = True finally: if should_terminate: logger.warning("Terminating session") _cleanup_session(sdk, session_id) if not check_progress: process = multiprocessing.Process(target=_run, args=(logger, )) else: process = multiprocessing.Process( target=_check_progress, args=(logger, )) build_timeout = test_config["run"].get("build_timeout", 1800) project_url = anyscale_project_url( project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"]) logger.info(f"Link to project: {project_url}") msg = f"This will now run test {test_name}." if smoke_test: msg += " This is a smoke test." if is_long_running: msg += " This is a long running test." logger.info(msg) logger.info(f"Starting process with timeout {timeout} " f"(build timeout {build_timeout})") process.start() timeout_time = time.time() + build_timeout result = {} while process.is_alive(): try: state: State = result_queue.get(timeout=1) except (Empty, TimeoutError): if time.time() > timeout_time: stop_event.set() logger.warning("Process timed out.") if not is_long_running: logger.warning("Terminating process in 10 seconds.") time.sleep(10) logger.warning("Terminating process now.") process.terminate() else: logger.info("Process is long running. 
Give 2 minutes to " "fetch result and terminate.") start_terminate = time.time() while time.time( ) < start_terminate + 120 and process.is_alive(): time.sleep(1) if process.is_alive(): logger.warning("Terminating forcefully now.") process.terminate() else: logger.info("Long running results collected.") break continue if not isinstance(state, State): raise RuntimeError(f"Expected `State` object, got {result}") if state.state == "CMD_PREPARE": timeout_time = state.timestamp + timeout if state.state == "CMD_RUN": timeout_time = state.timestamp + timeout elif state.state == "END": result = state.data break while not result_queue.empty(): state = result_queue.get_nowait() result = state.data logger.info("Final check if everything worked.") try: result.setdefault("status", "error (status not found)") except (TimeoutError, Empty): result = {"status": "timeout", "last_logs": "Test timed out."} logger.info(f"Final results: {result}") log_results_and_artifacts(result) if not keep_results_dir: logger.info(f"Removing results dir {temp_dir}") shutil.rmtree(temp_dir) else: with open(os.path.join(temp_dir, "results.json"), "wt") as fp: json.dump(result, fp) out_dir = os.path.expanduser(GLOBAL_CONFIG["RELEASE_RESULTS_DIR"]) logger.info(f"Moving results dir {temp_dir} to persistent location " f"{out_dir}") shutil.rmtree(out_dir, ignore_errors=True) shutil.copytree(temp_dir, out_dir) logger.info(f"Dir contents: {os.listdir(out_dir)}") return result def run_test(test_config_file: str, test_name: str, project_id: str, commit_url: str, category: str = "unspecified", smoke_test: bool = False, no_terminate: bool = False, kick_off_only: bool = False, check_progress: bool = False, report: bool = True, keep_results_dir: bool = False, session_name: Optional[str] = None, app_config_id_override=None) -> Dict[str, Any]: with open(test_config_file, "rt") as f: test_configs = yaml.safe_load(f) test_config_dict = {} for test_config in test_configs: name = test_config.pop("name") test_config_dict[name] = test_config if test_name not in test_config_dict: raise ValueError( f"Test with name `{test_name}` not found in test config file " f"at `{test_config_file}`.") test_config = test_config_dict[test_name] if smoke_test and "smoke_test" in test_config: smoke_test_config = test_config.pop("smoke_test") test_config = _deep_update(test_config, smoke_test_config) local_dir = os.path.dirname(test_config_file) if "local_dir" in test_config: local_dir = os.path.join(local_dir, test_config["local_dir"]) if test_config["run"].get("use_connect"): assert not kick_off_only, \ "--kick-off-only is unsupported when running with " \ "Anyscale connect." assert not check_progress, \ "--check is unsupported when running with Anyscale connect." if test_config.get("artifacts", {}): logger.error( "Saving artifacts are not yet supported when running with " "Anyscale connect.") result = run_test_config( local_dir, project_id, test_name, test_config, commit_url, session_name=session_name, smoke_test=smoke_test, no_terminate=no_terminate, kick_off_only=kick_off_only, check_progress=check_progress, upload_artifacts=report, keep_results_dir=keep_results_dir, app_config_id_override=app_config_id_override) status = result.get("status", "invalid") if kick_off_only: if status != "kickoff": raise RuntimeError("Error kicking off test.") logger.info("Kicked off test. 
It's now up to the `--check` " "part of the script to track its process.") return {} else: # `--check` or no kick off only if status == "nosession": logger.info(f"No running session found for test {test_name}, so " f"assuming everything is fine.") return {} if status == "kickoff": logger.info(f"Test {test_name} is still running.") return {} last_logs = result.get("last_logs", "No logs.") test_suite = os.path.basename(test_config_file).replace(".yaml", "") report_kwargs = dict( test_suite=test_suite, test_name=test_name, status=status, last_logs=last_logs, results=result.get("results", {}), artifacts=result.get("artifacts", {}), category=category, ) if report: report_result(**report_kwargs) else: logger.info(f"Usually I would now report the following results:\n" f"{report_kwargs}") if has_errored(result): raise RuntimeError(last_logs) return report_kwargs if __name__ == "__main__": parser = argparse.ArgumentParser( description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument( "--test-config", type=str, required=True, help="Test config file") parser.add_argument("--test-name", type=str, help="Test name in config") parser.add_argument( "--ray-wheels", required=False, type=str, help="URL to ray wheels") parser.add_argument( "--no-terminate", action="store_true", default=False, help="Don't terminate session after failure") parser.add_argument( "--no-report", action="store_true", default=False, help="Do not report any results or upload to S3") parser.add_argument( "--kick-off-only", action="store_true", default=False, help="Kick off only (don't wait for command to finish)") parser.add_argument( "--check", action="store_true", default=False, help="Check (long running) status") parser.add_argument( "--keep-results-dir", action="store_true", default=False, help="Keep results in directory (named RELEASE_RESULTS_DIR), e.g. " "for Buildkite artifact upload.") parser.add_argument( "--category", type=str, default="unspecified", help="Category name, e.g. 
`release-1.3.0` (will be saved in database)") parser.add_argument( "--smoke-test", action="store_true", help="Finish quickly for testing") parser.add_argument( "--session-name", required=False, type=str, help="Name of the session to run this test.") parser.add_argument( "--app-config-id-override", required=False, type=str, help=("An app config ID, which will override the test config app " "config.")) args, _ = parser.parse_known_args() if not GLOBAL_CONFIG["ANYSCALE_PROJECT"]: raise RuntimeError( "You have to set the ANYSCALE_PROJECT environment variable!") maybe_fetch_api_token() if args.ray_wheels: os.environ["RAY_WHEELS"] = str(args.ray_wheels) url = str(args.ray_wheels) elif not args.check and not os.environ.get("RAY_WHEELS"): url = find_ray_wheels( GLOBAL_CONFIG["RAY_REPO"], GLOBAL_CONFIG["RAY_BRANCH"], GLOBAL_CONFIG["RAY_VERSION"], ) if not url: raise RuntimeError(f"Could not find wheels for " f"Ray {GLOBAL_CONFIG['RAY_VERSION']}, " f"branch {GLOBAL_CONFIG['RAY_BRANCH']}") # RAY_COMMIT is set by find_ray_wheels elif os.environ.get("RAY_WHEELS"): logger.info(f"Using Ray wheels provided from URL: " f"{os.environ.get('RAY_WHEELS')}") url = os.environ.get("RAY_WHEELS") populate_wheels_sanity_check(os.environ.get("RAY_COMMIT", "")) test_config_file = os.path.abspath(os.path.expanduser(args.test_config)) result_dict = run_test( test_config_file=test_config_file, test_name=args.test_name, project_id=GLOBAL_CONFIG["ANYSCALE_PROJECT"], commit_url=url, category=args.category, smoke_test=args.smoke_test, no_terminate=args.no_terminate or args.kick_off_only, kick_off_only=args.kick_off_only, check_progress=args.check, report=not args.no_report, session_name=args.session_name, keep_results_dir=args.keep_results_dir, app_config_id_override=args.app_config_id_override, ) if result_dict: # If we get a result dict, check if any alerts should be raised from alert import SUITE_TO_FN, default_handle_result logger.info("Checking if results are valid...") handle_result_kwargs = result_dict.copy() handle_result_kwargs["created_on"] = None test_suite = handle_result_kwargs.get("test_suite", None) test_name = handle_result_kwargs.get("test_name", None) category = handle_result_kwargs.get("category", None) handle_fn = SUITE_TO_FN.get(test_suite, None) if not handle_fn: logger.warning(f"No handle for suite {test_suite}") alert = default_handle_result(**handle_result_kwargs) else: alert = handle_fn(**handle_result_kwargs) if alert: # If we get an alert, the test failed. raise RuntimeError(alert) else: logger.info(f"No alert raised for test {test_suite}/{test_name} " f"({category}) - the test successfully passed!")
true
true
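The record above is a release-test driver that runs the actual test in a child process and coordinates with it through a multiprocessing queue of State tuples, extending its deadline when CMD_PREPARE/CMD_RUN states arrive and terminating the child if no END state shows up in time. Below is a minimal, self-contained sketch of that coordination pattern only; it is not the Ray release tooling itself, and the State namedtuple, the worker body, and the timeout value are illustrative assumptions.

import collections
import multiprocessing
import time
from queue import Empty

# Illustrative stand-in for the driver's State record.
State = collections.namedtuple("State", ["state", "timestamp", "data"])


def _worker(result_queue: multiprocessing.Queue):
    # Report that the command started, do some "work", then report the end.
    result_queue.put(State("CMD_RUN", time.time(), None))
    time.sleep(2)  # stand-in for the actual test command
    result_queue.put(State("END", time.time(), {"status": "finished"}))


def run_with_timeout(timeout: float = 10.0) -> dict:
    result_queue = multiprocessing.Queue()
    process = multiprocessing.Process(target=_worker, args=(result_queue,))
    process.start()

    deadline = time.time() + timeout
    result = {"status": "timeout"}
    while process.is_alive():
        try:
            state = result_queue.get(timeout=1)
        except Empty:
            if time.time() > deadline:
                process.terminate()
                break
            continue
        if state.state == "CMD_RUN":
            # The real driver resets its deadline here, so long-running
            # commands do not trip the build timeout.
            deadline = state.timestamp + timeout
        elif state.state == "END":
            result = state.data
            break
    # Drain any final states that arrived just as the process exited.
    while not result_queue.empty():
        state = result_queue.get_nowait()
        if state.state == "END":
            result = state.data
    process.join()
    return result


if __name__ == "__main__":
    print(run_with_timeout())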
f70143280a7d3e0800ab0318774e9f82ec6c1c84
2,014
py
Python
read_groups_from_bam.py
DReichLab/adna-workflow
07c6da8e64234decb7373fe7109e09395a45cb58
[ "BSD-3-Clause" ]
9
2019-05-28T11:16:14.000Z
2022-02-24T01:22:47.000Z
read_groups_from_bam.py
DReichLab/adna-workflow
07c6da8e64234decb7373fe7109e09395a45cb58
[ "BSD-3-Clause" ]
3
2020-01-09T20:12:02.000Z
2020-11-17T14:50:28.000Z
read_groups_from_bam.py
DReichLab/adna-workflow
07c6da8e64234decb7373fe7109e09395a45cb58
[ "BSD-3-Clause" ]
1
2019-08-04T12:46:01.000Z
2019-08-04T12:46:01.000Z
import argparse
import pysam

# parse the read group strings from a bam/sam header
# return array of read group strings
def read_groups_from_bam(bam_filename, use_libraries=False):
    bam = pysam.AlignmentFile(bam_filename, "rb")
    header = bam.header
    results = {}

    if 'RG' in header:
        read_groups = header['RG']
        if use_libraries:
            field = 'LB'
        else:
            field = 'ID'
        #print(read_groups)
        for read_group in read_groups:
            results[read_group[field]] = 1
            #read_group['SM'] = sample
            #print(read_group)
    results_without_duplicates = [key for (key, ignored) in results.items()]
    sorted_read_groups = sorted(results_without_duplicates)
    return sorted_read_groups

def read_groups_and_libraries_from_bam(bam_filename):
    bam = pysam.AlignmentFile(bam_filename, "rb")
    header = bam.header
    results = {}

    if 'RG' in header:
        read_groups = header['RG']
        #print(read_groups)
        for read_group in read_groups:
            read_group_id = read_group['ID']
            read_group_library = read_group['LB']
            results[read_group_id] = read_group_library
    return results

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Show the read groups in a bam.")
    parser.add_argument('-p', "--pulldown", help="report read groups colon-delimited for pulldown", action='store_true')
    parser.add_argument('-l', "--libraries", help="report libraries instead of read groups", action='store_true')
    parser.add_argument('-b', "--both", help="report read groups and libraries", action='store_true')
    parser.add_argument("bam", help="bam for read groups")
    args = parser.parse_args()

    bam_filename = args.bam

    if args.both:
        read_groups_to_libraries = read_groups_and_libraries_from_bam(bam_filename)
        for read_group, library in read_groups_to_libraries.items():
            print("{}\t{}".format(read_group, library))
    else:
        read_groups = read_groups_from_bam(bam_filename, args.libraries)
        if args.pulldown:
            print(':'.join(read_groups))
        else:
            for read_group in read_groups:
                print(read_group)
29.617647
117
0.735849
import argparse
import pysam

def read_groups_from_bam(bam_filename, use_libraries=False):
    bam = pysam.AlignmentFile(bam_filename, "rb")
    header = bam.header
    results = {}

    if 'RG' in header:
        read_groups = header['RG']
        if use_libraries:
            field = 'LB'
        else:
            field = 'ID'
        for read_group in read_groups:
            results[read_group[field]] = 1
    results_without_duplicates = [key for (key, ignored) in results.items()]
    sorted_read_groups = sorted(results_without_duplicates)
    return sorted_read_groups

def read_groups_and_libraries_from_bam(bam_filename):
    bam = pysam.AlignmentFile(bam_filename, "rb")
    header = bam.header
    results = {}

    if 'RG' in header:
        read_groups = header['RG']
        for read_group in read_groups:
            read_group_id = read_group['ID']
            read_group_library = read_group['LB']
            results[read_group_id] = read_group_library
    return results

if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Show the read groups in a bam.")
    parser.add_argument('-p', "--pulldown", help="report read groups colon-delimited for pulldown", action='store_true')
    parser.add_argument('-l', "--libraries", help="report libraries instead of read groups", action='store_true')
    parser.add_argument('-b', "--both", help="report read groups and libraries", action='store_true')
    parser.add_argument("bam", help="bam for read groups")
    args = parser.parse_args()

    bam_filename = args.bam

    if args.both:
        read_groups_to_libraries = read_groups_and_libraries_from_bam(bam_filename)
        for read_group, library in read_groups_to_libraries.items():
            print("{}\t{}".format(read_group, library))
    else:
        read_groups = read_groups_from_bam(bam_filename, args.libraries)
        if args.pulldown:
            print(':'.join(read_groups))
        else:
            for read_group in read_groups:
                print(read_group)
true
true
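The record above (read_groups_from_bam.py) pulls read-group IDs and libraries out of a BAM header with pysam. As a rough illustration of the input it expects, the sketch below writes a header-only BAM containing two made-up read groups and prints their IDs and libraries; the file name and read-group entries are invented for the example, and it assumes a reasonably recent pysam with AlignmentHeader.to_dict().

import pysam

# A made-up header with two read groups; only 'RG' matters for this example.
header = {
    "HD": {"VN": "1.0"},
    "SQ": [{"SN": "chr1", "LN": 1000}],
    "RG": [
        {"ID": "rg1", "LB": "libA", "SM": "sample1"},
        {"ID": "rg2", "LB": "libB", "SM": "sample1"},
    ],
}

# Write a BAM that contains only a header, no aligned reads.
with pysam.AlignmentFile("toy.bam", "wb", header=header) as bam:
    pass

# Read the read-group records back out of the header.
with pysam.AlignmentFile("toy.bam", "rb") as bam:
    for read_group in bam.header.to_dict().get("RG", []):
        print("{}\t{}".format(read_group["ID"], read_group["LB"]))

# The script in the record above would be run against such a file as, e.g.:
#   python read_groups_from_bam.py --both toy.bam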
f701433e2b27650c2062d7a20ec39b804434334b
750
py
Python
alembic/versions/18b9d421fbde_add_initial_models.py
maskducks/garlic-bot
cf8e30fcaeb99cc8877f65eacd07504a9970bf45
[ "MIT" ]
4
2022-03-19T16:53:09.000Z
2022-03-19T23:42:25.000Z
alembic/versions/18b9d421fbde_add_initial_models.py
maskducks/garlic-bot
cf8e30fcaeb99cc8877f65eacd07504a9970bf45
[ "MIT" ]
3
2022-03-20T11:58:30.000Z
2022-03-26T09:16:33.000Z
alembic/versions/18b9d421fbde_add_initial_models.py
maskducks/garlic-bot
cf8e30fcaeb99cc8877f65eacd07504a9970bf45
[ "MIT" ]
2
2022-03-21T05:22:47.000Z
2022-03-28T04:40:11.000Z
"""add initial models Revision ID: 18b9d421fbde Revises: Create Date: 2022-03-19 12:36:16.067795 """ import sqlalchemy as sa from alembic import op # revision identifiers, used by Alembic. revision = "18b9d421fbde" down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table( "stats", sa.Column("user", sa.BigInteger(), nullable=False), sa.Column("count", sa.BigInteger(), nullable=True), sa.PrimaryKeyConstraint("user"), ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_table("stats") # ### end Alembic commands ###
22.058824
65
0.656
import sqlalchemy as sa
from alembic import op

revision = "18b9d421fbde"
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    op.create_table(
        "stats",
        sa.Column("user", sa.BigInteger(), nullable=False),
        sa.Column("count", sa.BigInteger(), nullable=True),
        sa.PrimaryKeyConstraint("user"),
    )


def downgrade():
    op.drop_table("stats")
true
true
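The record above is an auto-generated Alembic migration that creates a single stats table keyed by user. For orientation, here is a guess at the declarative SQLAlchemy model such a migration would typically back; only the table name and columns come from the record, while the class name, Base setup, and the in-memory SQLite smoke test are assumptions (SQLAlchemy 1.4+).

import sqlalchemy as sa
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class Stats(Base):
    __tablename__ = "stats"

    # BigInteger matches the migration; user is the primary key.
    user = sa.Column(sa.BigInteger, primary_key=True, nullable=False)
    count = sa.Column(sa.BigInteger, nullable=True)


if __name__ == "__main__":
    # Creating the schema directly mirrors what upgrade() above does via op.create_table.
    engine = sa.create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)
    print([column["name"] for column in sa.inspect(engine).get_columns("stats")])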
f701439a3be07ffe80c7e22fe838ba3e56ff4b67
2,705
py
Python
suites/Scenarios/CreateManyContractsInOneTrx.py
echoprotocol/pytests
5dce698558c2ba703aea03aab79906af1437da5d
[ "MIT" ]
1
2021-03-12T05:17:02.000Z
2021-03-12T05:17:02.000Z
suites/Scenarios/CreateManyContractsInOneTrx.py
echoprotocol/pytests
5dce698558c2ba703aea03aab79906af1437da5d
[ "MIT" ]
1
2019-11-19T12:10:59.000Z
2019-11-19T12:10:59.000Z
suites/Scenarios/CreateManyContractsInOneTrx.py
echoprotocol/pytests
5dce698558c2ba703aea03aab79906af1437da5d
[ "MIT" ]
2
2019-04-29T10:46:48.000Z
2019-10-29T10:01:03.000Z
# -*- coding: utf-8 -*-
from common.base_test import BaseTest

import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that, has_length

SUITE = {
    "description": "Creating many contracts in a single transaction"
}


@lcc.prop("main", "type")
@lcc.tags("scenarios", "many_contracts_in_one_trx")
@lcc.suite("Check scenario 'Create many contracts in a single transaction'")
class CreateManyContractsInOneTrx(BaseTest):

    def __init__(self):
        super().__init__()
        self.__database_api_identifier = None
        self.__registration_api_identifier = None
        self.contract = self.get_byte_code("piggy", "code")
        self.echo_acc0 = None

    def setup_suite(self):
        super().setup_suite()
        self._connect_to_echopy_lib()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__database_api_identifier = self.get_identifier("database")
        self.__registration_api_identifier = self.get_identifier("registration")
        lcc.log_info(
            "API identifiers are: database='{}', registration='{}'".format(
                self.__database_api_identifier, self.__registration_api_identifier
            )
        )
        self.echo_acc0 = self.get_account_id(
            self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
        )
        lcc.log_info("Echo account is '{}'".format(self.echo_acc0))

    def teardown_suite(self):
        self._disconnect_to_echopy_lib()
        super().teardown_suite()

    @lcc.test(
        "The scenario describes creating many contracts in a single transaction "
        "on the Echo network, written in Solidity."
    )
    def create_many_contracts_in_one_trx_scenario(self, get_random_integer_up_to_fifty):
        number_of_contracts = get_random_integer_up_to_fifty

        lcc.set_step("Create '{}' 'Piggy' contracts in the Echo network".format(number_of_contracts))
        operation = self.echo_ops.get_contract_create_operation(
            echo=self.echo, registrar=self.echo_acc0, bytecode=self.contract
        )
        collected_operation = self.collect_operations(operation, self.__database_api_identifier)
        list_operations = []
        for i in range(number_of_contracts):
            list_operations.append(collected_operation)
        broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=list_operations, log_broadcast=False)

        lcc.set_step("Check that all contracts created in the Echo network")
        check_that(
            "in 'broadcast_result' are 'operation_results'",
            broadcast_result.get("trx").get("operation_results"),
            has_length(number_of_contracts)
        )
40.984848
120
0.697967
from common.base_test import BaseTest

import lemoncheesecake.api as lcc
from lemoncheesecake.matching import check_that, has_length

SUITE = {
    "description": "Creating many contracts in a single transaction"
}


@lcc.prop("main", "type")
@lcc.tags("scenarios", "many_contracts_in_one_trx")
@lcc.suite("Check scenario 'Create many contracts in a single transaction'")
class CreateManyContractsInOneTrx(BaseTest):

    def __init__(self):
        super().__init__()
        self.__database_api_identifier = None
        self.__registration_api_identifier = None
        self.contract = self.get_byte_code("piggy", "code")
        self.echo_acc0 = None

    def setup_suite(self):
        super().setup_suite()
        self._connect_to_echopy_lib()
        lcc.set_step("Setup for {}".format(self.__class__.__name__))
        self.__database_api_identifier = self.get_identifier("database")
        self.__registration_api_identifier = self.get_identifier("registration")
        lcc.log_info(
            "API identifiers are: database='{}', registration='{}'".format(
                self.__database_api_identifier, self.__registration_api_identifier
            )
        )
        self.echo_acc0 = self.get_account_id(
            self.accounts[0], self.__database_api_identifier, self.__registration_api_identifier
        )
        lcc.log_info("Echo account is '{}'".format(self.echo_acc0))

    def teardown_suite(self):
        self._disconnect_to_echopy_lib()
        super().teardown_suite()

    @lcc.test(
        "The scenario describes creating many contracts in a single transaction "
        "on the Echo network, written in Solidity."
    )
    def create_many_contracts_in_one_trx_scenario(self, get_random_integer_up_to_fifty):
        number_of_contracts = get_random_integer_up_to_fifty

        lcc.set_step("Create '{}' 'Piggy' contracts in the Echo network".format(number_of_contracts))
        operation = self.echo_ops.get_contract_create_operation(
            echo=self.echo, registrar=self.echo_acc0, bytecode=self.contract
        )
        collected_operation = self.collect_operations(operation, self.__database_api_identifier)
        list_operations = []
        for i in range(number_of_contracts):
            list_operations.append(collected_operation)
        broadcast_result = self.echo_ops.broadcast(echo=self.echo, list_operations=list_operations, log_broadcast=False)

        lcc.set_step("Check that all contracts created in the Echo network")
        check_that(
            "in 'broadcast_result' are 'operation_results'",
            broadcast_result.get("trx").get("operation_results"),
            has_length(number_of_contracts)
        )
true
true
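The record above drives a real Echo node through echopy and lemoncheesecake, so it only runs inside that test harness. The toy sketch below merely mirrors the shape of its final check in plain Python: duplicate one collected operation N times, "broadcast" it with a stand-in function, and assert that one operation_result comes back per contract. Every name in it is a placeholder, not the echopy API.

import random


def fake_broadcast(list_operations):
    # Pretend the node returns one operation_result per submitted operation.
    return {"trx": {"operation_results": [[1, "1.11.{}".format(i)] for i, _ in enumerate(list_operations)]}}


number_of_contracts = random.randint(1, 50)
collected_operation = {"op": "contract_create", "registrar": "1.2.0", "bytecode": "<piggy bytecode>"}
list_operations = [collected_operation for _ in range(number_of_contracts)]

broadcast_result = fake_broadcast(list_operations)
assert len(broadcast_result["trx"]["operation_results"]) == number_of_contracts
print("all {} contract-create results present".format(number_of_contracts))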
f70144df36bfb120c94c043c99c51af609fbbdea
39,698
py
Python
sbaas/analysis/analysis_stage00/stage00_query.py
SBRG/sbaas
9df76bbffdd620cf8566744a2b0503935998fbe0
[ "Apache-2.0" ]
1
2017-05-13T04:35:08.000Z
2017-05-13T04:35:08.000Z
sbaas/analysis/analysis_stage00/stage00_query.py
SBRG/sbaas
9df76bbffdd620cf8566744a2b0503935998fbe0
[ "Apache-2.0" ]
null
null
null
sbaas/analysis/analysis_stage00/stage00_query.py
SBRG/sbaas
9df76bbffdd620cf8566744a2b0503935998fbe0
[ "Apache-2.0" ]
2
2017-02-23T19:32:38.000Z
2020-01-14T19:13:05.000Z
from sbaas.analysis.analysis_base import * class stage00_query(base_analysis): def get_structureFile_standards(self,met_id_I): '''Querry structure file and extension from metabolomics standards''' try: structure = self.session.query(standards.structure_file, standards.structure_file_extention).filter( standards.met_id.like(met_id_I)).all(); struct_file_O = ''; struct_file_ext_O = ''; if structure: struct_file_O = structure[0][0]; struct_file_ext_O = structure[0][1]; else: print('no structure file found for ' + met_id_I); exit(-1); return struct_file_O, struct_file_ext_O except SQLAlchemyError as e: print(e); def get_exactMassAndFormula_standards(self,met_id_I): '''Querry exact mass and formula from metabolomics standards''' try: massformula = self.session.query(standards.exactmass, standards.formula).filter( standards.met_id.like(met_id_I)).all(); mass_O = ''; formula_O = ''; if massformula: mass_O = massformula[0][0]; formula_O = massformula[0][1]; else: print('no mass and formula found for ' + met_id_I); exit(-1); return mass_O, formula_O except SQLAlchemyError as e: print(e); def get_Q1AndQ3MassAndMode_MSComponents(self,met_id_I): '''Querry q1 mass, q3 mass, and ms_mode from ms_components''' try: mscomponents = self.session.query(MS_components.q1_mass, MS_components.q3_mass, MS_components.ms_mode).filter( MS_components.met_id.like(met_id_I)).order_by( MS_components.ms_mode.asc(), MS_components.q1_mass.asc(), MS_components.q3_mass.asc()).all(); mscomponents_O = []; for msc in mscomponents: mscomponents_1 = {}; mscomponents_1['met_id'] = met_id_I; mscomponents_1['q1_mass'] = msc.q1_mass; mscomponents_1['q3_mass'] = msc.q3_mass; mscomponents_1['ms_mode'] = msc.ms_mode; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def get_row_MSComponents(self,met_id_I,ms_mode_I,ms_methodtype_I): '''Querry row from ms_components by met_id, ms_mode, and ms_methodtype''' try: mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass, MS_components.ms3_mass,MS_components.met_name,MS_components.dp, MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af, MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank, MS_components.ion_abundance,MS_components.precursor_formula, MS_components.product_ion_reference,MS_components.product_formula, MS_components.production_ion_notes,MS_components.met_id, MS_components.external_reference,MS_components.q1_mass_units, MS_components.q3_mass_units,MS_components.ms3_mass_units, MS_components.threshold_units,MS_components.dp_units, MS_components.ep_units,MS_components.ce_units, MS_components.cxp_units,MS_components.af_units, MS_components.ms_group,MS_components.threshold, MS_components.dwell_weight,MS_components.component_name, MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment, MS_components.product_fragment,MS_components.precursor_exactmass, MS_components.product_exactmass,MS_components.ms_methodtype).filter( MS_components.met_id.like(met_id_I), MS_components.ms_mode.like(ms_mode_I), MS_components.ms_methodtype.like(ms_methodtype_I)).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('met_id: ' + met_id_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["ms3_mass"] = msc.ms3_mass; mscomponents_1["met_name"] = 
msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; mscomponents_1["cxp"] = msc.cxp; mscomponents_1["af"] = msc.af; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["ms_mode"] = msc.ms_mode; mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank; mscomponents_1["ion_abundance"] = msc.ion_abundance; mscomponents_1["precursor_formula"] = msc.precursor_formula; mscomponents_1["product_ion_reference"] = msc.product_ion_reference; mscomponents_1["product_formula"] = msc.product_formula; mscomponents_1["production_ion_notes"] = msc.production_ion_notes; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["external_reference"] = msc.external_reference; mscomponents_1["q1_mass_units"] = msc.q1_mass_units; mscomponents_1["q3_mass_units"] = msc.q3_mass_units; mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units; mscomponents_1["threshold_units"] = msc.threshold_units; mscomponents_1["dp_units"] = msc.dp_units; mscomponents_1["ep_units"] = msc.ep_units; mscomponents_1["ce_units"] = msc.ce_units; mscomponents_1["cxp_units"] = msc.cxp_units; mscomponents_1["af_units"] = msc.af_units; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["ms_include"] = msc.ms_include; mscomponents_1["ms_is"] = msc.ms_is; mscomponents_1["precursor_fragment"] = msc.precursor_fragment; mscomponents_1["product_fragment"] = msc.product_fragment; mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass; mscomponents_1["product_exactmass"] = msc.product_exactmass; mscomponents_1["ms_methodtype"] = msc.ms_methodtype; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def get_row_MSComponents_metIDAndFormula(self,met_id_I,precursor_formula_I, product_formula_I,ms_methodtype_I): '''Querry row from ms_components by met_id, precursor_formula, product_formula''' try: mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass, MS_components.ms3_mass,MS_components.met_name,MS_components.dp, MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af, MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank, MS_components.ion_abundance,MS_components.precursor_formula, MS_components.product_ion_reference,MS_components.product_formula, MS_components.production_ion_notes,MS_components.met_id, MS_components.external_reference,MS_components.q1_mass_units, MS_components.q3_mass_units,MS_components.ms3_mass_units, MS_components.threshold_units,MS_components.dp_units, MS_components.ep_units,MS_components.ce_units, MS_components.cxp_units,MS_components.af_units, MS_components.ms_group,MS_components.threshold, MS_components.dwell_weight,MS_components.component_name, MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment, MS_components.product_fragment,MS_components.precursor_exactmass, MS_components.product_exactmass,MS_components.ms_methodtype).filter( MS_components.met_id.like(met_id_I), MS_components.precursor_formula.like(precursor_formula_I), MS_components.product_formula.like(product_formula_I), MS_components.ms_methodtype.like(ms_methodtype_I)).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('met_id: ' + met_id_I + ', precursor_formula_I: ' + precursor_formula_I + ', product_formula_I: ' + product_formula_I + ', ms_methodtype_I: 
' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["ms3_mass"] = msc.ms3_mass; mscomponents_1["met_name"] = msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; mscomponents_1["cxp"] = msc.cxp; mscomponents_1["af"] = msc.af; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["ms_mode"] = msc.ms_mode; mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank; mscomponents_1["ion_abundance"] = msc.ion_abundance; mscomponents_1["precursor_formula"] = msc.precursor_formula; mscomponents_1["product_ion_reference"] = msc.product_ion_reference; mscomponents_1["product_formula"] = msc.product_formula; mscomponents_1["production_ion_notes"] = msc.production_ion_notes; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["external_reference"] = msc.external_reference; mscomponents_1["q1_mass_units"] = msc.q1_mass_units; mscomponents_1["q3_mass_units"] = msc.q3_mass_units; mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units; mscomponents_1["threshold_units"] = msc.threshold_units; mscomponents_1["dp_units"] = msc.dp_units; mscomponents_1["ep_units"] = msc.ep_units; mscomponents_1["ce_units"] = msc.ce_units; mscomponents_1["cxp_units"] = msc.cxp_units; mscomponents_1["af_units"] = msc.af_units; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["ms_include"] = msc.ms_include; mscomponents_1["ms_is"] = msc.ms_is; mscomponents_1["precursor_fragment"] = msc.precursor_fragment; mscomponents_1["product_fragment"] = msc.product_fragment; mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass; mscomponents_1["product_exactmass"] = msc.product_exactmass; mscomponents_1["ms_methodtype"] = msc.ms_methodtype; mscomponents_O.append(mscomponents_1); return mscomponents_O[0]; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_sampleDescription(self,experiment_id_I): '''Query the maximum number of biological replicates corresponding to a given experiment''' try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_batchFileInfo_experimentID(self,experiment_id_I,sample_type_I): '''Query data from experiment and sample for batch file''' try: data = self.session.query(experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_type.like(sample_type_I), sample.sample_id.like(sample_description.sample_id)).group_by( experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, 
sample_description.sample_desc, sample_description.sample_name_abbreviation).order_by( experiment.id.asc(), sample.sample_dilution.desc(), sample_description.sample_name_abbreviation.asc(), #sample.sample_name.asc(), sample_description.sample_replicate.asc(), sample_description.sample_desc.desc()).all(); #.order_by( # experiment.id.asc(), # sample.sample_dilution.desc(), # sample_description.sample_replicate.asc(), # sample_description.sample_desc.desc(), # sample.sample_name.asc()).all(); data_O = []; if data: for d in data: data_tmp = {}; data_tmp['id']=d.id; data_tmp['sample_name']=d.sample_name; data_tmp['sample_type']=d.sample_type; data_tmp['acquisition_method_id']=d.acquisition_method_id; data_tmp['sample_dilution']=d.sample_dilution; data_tmp['sample_replicate']=d.sample_replicate; data_O.append(data_tmp); else: print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I); return data_O; except SQLAlchemyError as e: print(e); def get_batchFileInfo_experimentIDAndExpType(self,experiment_id_I,sample_type_I,exp_type_I): '''Query data from experiment and sample for batch file''' try: data = self.session.query(experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).filter( experiment.id.like(experiment_id_I), experiment.exp_type_id==exp_type_I, experiment.sample_name.like(sample.sample_name), sample.sample_type.like(sample_type_I), sample.sample_id.like(sample_description.sample_id)).group_by( experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).order_by( experiment.id.asc(), sample.sample_dilution.desc(), sample_description.sample_name_abbreviation.asc(), #sample.sample_name.asc(), sample_description.sample_replicate.asc(), sample_description.sample_desc.desc()).all(); #.order_by( # experiment.id.asc(), # sample.sample_dilution.desc(), # sample_description.sample_replicate.asc(), # sample_description.sample_desc.desc(), # sample.sample_name.asc()).all(); data_O = []; if data: for d in data: data_tmp = {}; data_tmp['id']=d.id; data_tmp['sample_name']=d.sample_name; data_tmp['sample_type']=d.sample_type; data_tmp['acquisition_method_id']=d.acquisition_method_id; data_tmp['sample_dilution']=d.sample_dilution; data_tmp['sample_replicate']=d.sample_replicate; data_O.append(data_tmp); else: print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I); return data_O; except SQLAlchemyError as e: print(e); def delete_sample_experimentIDAndSampleID_experiment(self,dataListDelete_I): '''Delete specific samples from an experiment by their sample ID from experiment''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(experiment).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sample(self,dataListDelete_I): '''Delete specific samples from an experiment by their sample ID from sample''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample).filter( 
experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sampleDescription(self,dataListDelete_I): '''Delete specific samples from an experiment by their sample ID from sample_description''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_description).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), sample_description.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sampleStorage(self,dataListDelete_I): '''Delete specific samples from an experiment by their sample ID from sample_storage''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_storage).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), sample_storage.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_samplePhysiologicalParameters(self,dataListDelete_I): '''Delete specific samples from an experiment by their sample ID from sample_physiologicalparameters''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_physiologicalParameters).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), sample_physiologicalParameters.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def get_calibratorIDAndLevel_sampleNameAndSampleType_sample(self,sample_name_I,sample_type_I): '''Querry calibrator id and level from metabolomics sample''' try: calibratorInfo = self.session.query(sample.calibrator_id, sample.calibrator_level).filter( sample.sample_name.like(sample_name_I), sample.sample_type.like(sample_type_I)).all(); id_O = None; level_O = None; if calibratorInfo: id_O = calibratorInfo[0][0]; level_O = calibratorInfo[0][1]; else: print('no calibrator id nor level found for sample_name/sample_type ' + sample_name_I + ' / ' + sample_type_I); return id_O, level_O except SQLAlchemyError as e: print(e); def get_calibratorConcentrationAndUnit_metIDAndCalibratorIDAndLevel_calibratorConcentrations(self, met_id_I, calibrator_id_I, calibrator_level_I): '''Querry calibrator id and level from metabolomics sample''' concentration_O = 0.0; unit_O = None; # 1. query the calibrator id for the metabolite try: calibratorID = self.session.query( calibrator2mix.calibrator_id).filter( mix2met_id.met_id.like(met_id_I), mix2met_id.mix_id.like(calibrator2mix.mix_id)).all(); calibrator_id_O = None; if calibratorID: calibrator_id_O = calibratorID[0][0]; else: print('no calibrator ID nor unit found for met_id ' + met_id_I); except SQLAlchemyError as e: print(e); # 2. 
check if the calibrator id matches if calibrator_id_O == calibrator_id_I: # 3. query the concentration and units try: calibratorInfo = self.session.query( calibrator_concentrations.calibrator_concentration, calibrator_concentrations.concentration_units).filter( calibrator_concentrations.met_id.like(met_id_I), calibrator_concentrations.calibrator_level == calibrator_level_I).all(); if calibratorInfo: concentration_O = calibratorInfo[0][0]; unit_O = calibratorInfo[0][1]; else: print('no calibrator concentration nor unit found for met_id/calibrator_id/calibrator_level ' + met_id_I + ' / ' + str(calibrator_id_I) + ' / ' + str(calibrator_level_I)); return concentration_O, unit_O except SQLAlchemyError as e: print(e); else: return concentration_O, unit_O def get_acqusitionMethod(self,lc_method_I,ms_mode_I,ms_methodtype_I): '''Querry acqusition method (i.e., join tables lc_elution and ms_components)''' try: mscomponents = self.session.query(MS_components.component_name, MS_components.met_id, MS_components.met_name, MS_components.q1_mass, MS_components.q3_mass, MS_components.dp, MS_components.ep, MS_components.ce, MS_components.cxp, MS_components.precursor_formula, MS_components.product_formula, MS_components.quantifier, MS_components.ms_group, MS_components.threshold, MS_components.dwell_weight, lc_elution.rt, lc_elution.ms_window, lc_elution.rt_units, lc_elution.window_units).filter( lc_elution.lc_method_id.like(lc_method_I), MS_components.ms_mode.like(ms_mode_I), MS_components.ms_methodtype.like(ms_methodtype_I), MS_components.met_id.like(lc_elution.met_id), MS_components.ms_include).group_by( # query only components that are included in the method MS_components.component_name, MS_components.met_id, MS_components.met_name, MS_components.q1_mass, MS_components.q3_mass, MS_components.dp, MS_components.ep, MS_components.ce, MS_components.cxp, MS_components.precursor_formula, MS_components.product_formula, MS_components.quantifier, MS_components.ms_group, MS_components.threshold, MS_components.dwell_weight, lc_elution.rt, lc_elution.ms_window, lc_elution.rt_units, lc_elution.window_units).order_by( lc_elution.rt.asc(), MS_components.component_name.asc()).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('lc_method_I: ' + lc_method_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["met_name"] = msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; mscomponents_1["cxp"] = msc.cxp; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["rt"] = msc.rt; mscomponents_1["ms_window"] = msc.ms_window; mscomponents_1["rt_units"] = msc.rt_units; mscomponents_1["window_units"] = msc.window_units; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def delete_sample_experimentID_experiment(self,dataListDelete_I): '''Delete samples from an experiment from experiment''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(experiment).filter( experiment.id.like(d['experiment_id'])).delete( synchronize_session=False); if delete == 0: 
print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentID_sample(self,dataListDelete_I): '''Delete an experiment from sample''' deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample).filter( experiment.id.like(d['experiment_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def get_nMaxBioReps_experimentIDAndSampleName_sampleDescription(self,experiment_id_I,sample_name_I): '''Query the maximum number of biological replicates corresponding to a given experiment''' try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample_name_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I): '''Query the maximum number of biological replicates corresponding to a given experiment''' try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_id_I), sample.sample_id.like(sample_description.sample_id), sample_description.sample_desc.like('Broth'), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleNameAbbreviation_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I): '''Query the maximum number of biological replicates corresponding to a given experiment''' try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.sample_desc.like('Broth') #sample_description.istechnical != True ).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleNameAbbreviationAndExpType_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I,exp_type_I): '''Query the maximum number of biological replicates corresponding to a given experiment''' try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.exp_type_id==exp_type_I, 
experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.sample_desc.like('Broth') #sample_description.istechnical != True ).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_sampleIDs_experimentID_experiment(self,experiment_id_I): '''Querry sample IDs that are used from the experiment''' try: sample_names = self.session.query(sample.sample_id).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name)).group_by( sample.sample_id).order_by( sample.sample_id.asc()).all(); sample_names_O = []; for sn in sample_names: sample_names_O.append(sn.sample_id); return sample_names_O; except SQLAlchemyError as e: print(e); def get_sampleNameAbbreviation_experimentIDAndSampleID(self,experiment_id_I,sample_id_I): '''Querry sample name abbreviation from the experiment''' try: sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter( sample.sample_id.like(sample_id_I), experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id)).group_by( sample_description.sample_name_abbreviation).order_by( sample_description.sample_name_abbreviation.asc()).all(); sample_name_abbreviations_O = None; sample_name_abbreviations_O = sample_name_abbreviations[0][0]; return sample_name_abbreviations_O; except SQLAlchemyError as e: print(e); def get_sampleNameAbbreviation_experimentIDAndSampleName(self,experiment_id_I,sample_name_I): '''Querry sample name abbreviation from the experiment''' try: sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter( sample.sample_name.like(sample_name_I), experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id)).group_by( sample_description.sample_name_abbreviation).order_by( sample_description.sample_name_abbreviation.asc()).all(); sample_name_abbreviations_O = None; sample_name_abbreviations_O = sample_name_abbreviations[0][0]; return sample_name_abbreviations_O; except SQLAlchemyError as e: print(e); def get_sampleLabelAndBoxAndPos_experimentIDAndExperimentTypeID_sampleStorage(self,experiment_id_I,exp_type_id_I): '''Querry sample name abbreviation from the experiment''' try: data = self.session.query(sample_storage.sample_id, sample_storage.sample_label, sample_storage.box, sample_storage.pos).filter( experiment.exp_type_id == exp_type_id_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_storage.sample_id)).group_by( sample_storage.sample_id, sample_storage.sample_label, sample_storage.box, sample_storage.pos).order_by( sample_storage.sample_id.asc()).all(); sampleStorage_O = []; if data: for d in data: sampleStorage_O.append({'sample_id':d.sample_id, 'sample_label':d.sample_label, 'box':d.box, 'pos':d.pos}); return sampleStorage_O; except SQLAlchemyError as e: print(e);
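The query helpers in the stage00_query record above all share one SQLAlchemy query-API shape: chain .filter() conditions (with .like() doing double duty as both a value match and an implicit join predicate between tables), then .group_by(), .order_by(), and finally .all(). The sketch below is a minimal, self-contained illustration of that shape against an in-memory SQLite database; the Experiment and Sample models and their columns are illustrative stand-ins, not the actual sbaas schema.

    # Minimal sketch of the filter/group_by/order_by shape used throughout the
    # stage00_query record above (SQLAlchemy 1.4+ imports; models are stand-ins).
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class Experiment(Base):
        __tablename__ = 'experiment'
        id = Column(Integer, primary_key=True)
        experiment_id = Column(String)
        sample_name = Column(String)

    class Sample(Base):
        __tablename__ = 'sample'
        id = Column(Integer, primary_key=True)
        sample_name = Column(String)
        sample_id = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()

    session.add_all([
        Experiment(experiment_id='exp01', sample_name='s1'),
        Sample(sample_name='s1', sample_id='S-001'),
    ])
    session.commit()

    # The LIKE condition between the two sample_name columns acts as the join
    # predicate, mirroring how the record relates tables without an explicit join().
    rows = (session.query(Sample.sample_id)
            .filter(Experiment.experiment_id.like('exp01'),
                    Experiment.sample_name.like(Sample.sample_name))
            .group_by(Sample.sample_id)
            .order_by(Sample.sample_id.asc())
            .all())
    print([r.sample_id for r in rows])  # -> ['S-001']

Relating tables purely through .filter(...like(...)) conditions keeps each helper self-contained, at the cost of relying on callers to supply consistent identifiers; an explicit join() would express the same relationship more directly.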
52.580132
191
0.58018
from sbaas.analysis.analysis_base import * class stage00_query(base_analysis): def get_structureFile_standards(self,met_id_I): try: structure = self.session.query(standards.structure_file, standards.structure_file_extention).filter( standards.met_id.like(met_id_I)).all(); struct_file_O = ''; struct_file_ext_O = ''; if structure: struct_file_O = structure[0][0]; struct_file_ext_O = structure[0][1]; else: print('no structure file found for ' + met_id_I); exit(-1); return struct_file_O, struct_file_ext_O except SQLAlchemyError as e: print(e); def get_exactMassAndFormula_standards(self,met_id_I): try: massformula = self.session.query(standards.exactmass, standards.formula).filter( standards.met_id.like(met_id_I)).all(); mass_O = ''; formula_O = ''; if massformula: mass_O = massformula[0][0]; formula_O = massformula[0][1]; else: print('no mass and formula found for ' + met_id_I); exit(-1); return mass_O, formula_O except SQLAlchemyError as e: print(e); def get_Q1AndQ3MassAndMode_MSComponents(self,met_id_I): try: mscomponents = self.session.query(MS_components.q1_mass, MS_components.q3_mass, MS_components.ms_mode).filter( MS_components.met_id.like(met_id_I)).order_by( MS_components.ms_mode.asc(), MS_components.q1_mass.asc(), MS_components.q3_mass.asc()).all(); mscomponents_O = []; for msc in mscomponents: mscomponents_1 = {}; mscomponents_1['met_id'] = met_id_I; mscomponents_1['q1_mass'] = msc.q1_mass; mscomponents_1['q3_mass'] = msc.q3_mass; mscomponents_1['ms_mode'] = msc.ms_mode; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def get_row_MSComponents(self,met_id_I,ms_mode_I,ms_methodtype_I): try: mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass, MS_components.ms3_mass,MS_components.met_name,MS_components.dp, MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af, MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank, MS_components.ion_abundance,MS_components.precursor_formula, MS_components.product_ion_reference,MS_components.product_formula, MS_components.production_ion_notes,MS_components.met_id, MS_components.external_reference,MS_components.q1_mass_units, MS_components.q3_mass_units,MS_components.ms3_mass_units, MS_components.threshold_units,MS_components.dp_units, MS_components.ep_units,MS_components.ce_units, MS_components.cxp_units,MS_components.af_units, MS_components.ms_group,MS_components.threshold, MS_components.dwell_weight,MS_components.component_name, MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment, MS_components.product_fragment,MS_components.precursor_exactmass, MS_components.product_exactmass,MS_components.ms_methodtype).filter( MS_components.met_id.like(met_id_I), MS_components.ms_mode.like(ms_mode_I), MS_components.ms_methodtype.like(ms_methodtype_I)).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('met_id: ' + met_id_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["ms3_mass"] = msc.ms3_mass; mscomponents_1["met_name"] = msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; mscomponents_1["cxp"] = msc.cxp; mscomponents_1["af"] = msc.af; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["ms_mode"] = msc.ms_mode; 
mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank; mscomponents_1["ion_abundance"] = msc.ion_abundance; mscomponents_1["precursor_formula"] = msc.precursor_formula; mscomponents_1["product_ion_reference"] = msc.product_ion_reference; mscomponents_1["product_formula"] = msc.product_formula; mscomponents_1["production_ion_notes"] = msc.production_ion_notes; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["external_reference"] = msc.external_reference; mscomponents_1["q1_mass_units"] = msc.q1_mass_units; mscomponents_1["q3_mass_units"] = msc.q3_mass_units; mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units; mscomponents_1["threshold_units"] = msc.threshold_units; mscomponents_1["dp_units"] = msc.dp_units; mscomponents_1["ep_units"] = msc.ep_units; mscomponents_1["ce_units"] = msc.ce_units; mscomponents_1["cxp_units"] = msc.cxp_units; mscomponents_1["af_units"] = msc.af_units; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["ms_include"] = msc.ms_include; mscomponents_1["ms_is"] = msc.ms_is; mscomponents_1["precursor_fragment"] = msc.precursor_fragment; mscomponents_1["product_fragment"] = msc.product_fragment; mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass; mscomponents_1["product_exactmass"] = msc.product_exactmass; mscomponents_1["ms_methodtype"] = msc.ms_methodtype; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def get_row_MSComponents_metIDAndFormula(self,met_id_I,precursor_formula_I, product_formula_I,ms_methodtype_I): try: mscomponents = self.session.query(MS_components.q1_mass,MS_components.q3_mass, MS_components.ms3_mass,MS_components.met_name,MS_components.dp, MS_components.ep,MS_components.ce,MS_components.cxp,MS_components.af, MS_components.quantifier,MS_components.ms_mode,MS_components.ion_intensity_rank, MS_components.ion_abundance,MS_components.precursor_formula, MS_components.product_ion_reference,MS_components.product_formula, MS_components.production_ion_notes,MS_components.met_id, MS_components.external_reference,MS_components.q1_mass_units, MS_components.q3_mass_units,MS_components.ms3_mass_units, MS_components.threshold_units,MS_components.dp_units, MS_components.ep_units,MS_components.ce_units, MS_components.cxp_units,MS_components.af_units, MS_components.ms_group,MS_components.threshold, MS_components.dwell_weight,MS_components.component_name, MS_components.ms_include,MS_components.ms_is,MS_components.precursor_fragment, MS_components.product_fragment,MS_components.precursor_exactmass, MS_components.product_exactmass,MS_components.ms_methodtype).filter( MS_components.met_id.like(met_id_I), MS_components.precursor_formula.like(precursor_formula_I), MS_components.product_formula.like(product_formula_I), MS_components.ms_methodtype.like(ms_methodtype_I)).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('met_id: ' + met_id_I + ', precursor_formula_I: ' + precursor_formula_I + ', product_formula_I: ' + product_formula_I + ', ms_methodtype_I: ' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["ms3_mass"] = msc.ms3_mass; mscomponents_1["met_name"] = msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; 
mscomponents_1["cxp"] = msc.cxp; mscomponents_1["af"] = msc.af; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["ms_mode"] = msc.ms_mode; mscomponents_1["ion_intensity_rank"] = msc.ion_intensity_rank; mscomponents_1["ion_abundance"] = msc.ion_abundance; mscomponents_1["precursor_formula"] = msc.precursor_formula; mscomponents_1["product_ion_reference"] = msc.product_ion_reference; mscomponents_1["product_formula"] = msc.product_formula; mscomponents_1["production_ion_notes"] = msc.production_ion_notes; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["external_reference"] = msc.external_reference; mscomponents_1["q1_mass_units"] = msc.q1_mass_units; mscomponents_1["q3_mass_units"] = msc.q3_mass_units; mscomponents_1["ms3_mass_units"] = msc.ms3_mass_units; mscomponents_1["threshold_units"] = msc.threshold_units; mscomponents_1["dp_units"] = msc.dp_units; mscomponents_1["ep_units"] = msc.ep_units; mscomponents_1["ce_units"] = msc.ce_units; mscomponents_1["cxp_units"] = msc.cxp_units; mscomponents_1["af_units"] = msc.af_units; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["ms_include"] = msc.ms_include; mscomponents_1["ms_is"] = msc.ms_is; mscomponents_1["precursor_fragment"] = msc.precursor_fragment; mscomponents_1["product_fragment"] = msc.product_fragment; mscomponents_1["precursor_exactmass"] = msc.precursor_exactmass; mscomponents_1["product_exactmass"] = msc.product_exactmass; mscomponents_1["ms_methodtype"] = msc.ms_methodtype; mscomponents_O.append(mscomponents_1); return mscomponents_O[0]; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_sampleDescription(self,experiment_id_I): try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_batchFileInfo_experimentID(self,experiment_id_I,sample_type_I): try: data = self.session.query(experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_type.like(sample_type_I), sample.sample_id.like(sample_description.sample_id)).group_by( experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).order_by( experiment.id.asc(), sample.sample_dilution.desc(), sample_description.sample_name_abbreviation.asc(), sample_description.sample_replicate.asc(), sample_description.sample_desc.desc()).all(); data_O = []; if data: for d in data: data_tmp = {}; data_tmp['id']=d.id; data_tmp['sample_name']=d.sample_name; data_tmp['sample_type']=d.sample_type; data_tmp['acquisition_method_id']=d.acquisition_method_id; 
data_tmp['sample_dilution']=d.sample_dilution; data_tmp['sample_replicate']=d.sample_replicate; data_O.append(data_tmp); else: print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I); return data_O; except SQLAlchemyError as e: print(e); def get_batchFileInfo_experimentIDAndExpType(self,experiment_id_I,sample_type_I,exp_type_I): try: data = self.session.query(experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).filter( experiment.id.like(experiment_id_I), experiment.exp_type_id==exp_type_I, experiment.sample_name.like(sample.sample_name), sample.sample_type.like(sample_type_I), sample.sample_id.like(sample_description.sample_id)).group_by( experiment.id, sample.sample_name, experiment.acquisition_method_id, sample.sample_dilution, sample.sample_type, sample_description.sample_replicate, sample_description.sample_desc, sample_description.sample_name_abbreviation).order_by( experiment.id.asc(), sample.sample_dilution.desc(), sample_description.sample_name_abbreviation.asc(), sample_description.sample_replicate.asc(), sample_description.sample_desc.desc()).all(); data_O = []; if data: for d in data: data_tmp = {}; data_tmp['id']=d.id; data_tmp['sample_name']=d.sample_name; data_tmp['sample_type']=d.sample_type; data_tmp['acquisition_method_id']=d.acquisition_method_id; data_tmp['sample_dilution']=d.sample_dilution; data_tmp['sample_replicate']=d.sample_replicate; data_O.append(data_tmp); else: print('no data found for experiment ' + experiment_id_I + ' and sample_type' + sample_type_I); return data_O; except SQLAlchemyError as e: print(e); def delete_sample_experimentIDAndSampleID_experiment(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(experiment).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sample(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sampleDescription(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_description).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), sample_description.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_sampleStorage(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_storage).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), 
sample_storage.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentIDAndSampleID_samplePhysiologicalParameters(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample_physiologicalParameters).filter( experiment.id.like(d['experiment_id']), sample.sample_id.like(d['sample_id']), experiment.sample_name.like(sample.sample_name), sample_physiologicalParameters.sample_id.like(sample.sample_id)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def get_calibratorIDAndLevel_sampleNameAndSampleType_sample(self,sample_name_I,sample_type_I): try: calibratorInfo = self.session.query(sample.calibrator_id, sample.calibrator_level).filter( sample.sample_name.like(sample_name_I), sample.sample_type.like(sample_type_I)).all(); id_O = None; level_O = None; if calibratorInfo: id_O = calibratorInfo[0][0]; level_O = calibratorInfo[0][1]; else: print('no calibrator id nor level found for sample_name/sample_type ' + sample_name_I + ' / ' + sample_type_I); return id_O, level_O except SQLAlchemyError as e: print(e); def get_calibratorConcentrationAndUnit_metIDAndCalibratorIDAndLevel_calibratorConcentrations(self, met_id_I, calibrator_id_I, calibrator_level_I): concentration_O = 0.0; unit_O = None; try: calibratorID = self.session.query( calibrator2mix.calibrator_id).filter( mix2met_id.met_id.like(met_id_I), mix2met_id.mix_id.like(calibrator2mix.mix_id)).all(); calibrator_id_O = None; if calibratorID: calibrator_id_O = calibratorID[0][0]; else: print('no calibrator ID nor unit found for met_id ' + met_id_I); except SQLAlchemyError as e: print(e); if calibrator_id_O == calibrator_id_I: try: calibratorInfo = self.session.query( calibrator_concentrations.calibrator_concentration, calibrator_concentrations.concentration_units).filter( calibrator_concentrations.met_id.like(met_id_I), calibrator_concentrations.calibrator_level == calibrator_level_I).all(); if calibratorInfo: concentration_O = calibratorInfo[0][0]; unit_O = calibratorInfo[0][1]; else: print('no calibrator concentration nor unit found for met_id/calibrator_id/calibrator_level ' + met_id_I + ' / ' + str(calibrator_id_I) + ' / ' + str(calibrator_level_I)); return concentration_O, unit_O except SQLAlchemyError as e: print(e); else: return concentration_O, unit_O def get_acqusitionMethod(self,lc_method_I,ms_mode_I,ms_methodtype_I): try: mscomponents = self.session.query(MS_components.component_name, MS_components.met_id, MS_components.met_name, MS_components.q1_mass, MS_components.q3_mass, MS_components.dp, MS_components.ep, MS_components.ce, MS_components.cxp, MS_components.precursor_formula, MS_components.product_formula, MS_components.quantifier, MS_components.ms_group, MS_components.threshold, MS_components.dwell_weight, lc_elution.rt, lc_elution.ms_window, lc_elution.rt_units, lc_elution.window_units).filter( lc_elution.lc_method_id.like(lc_method_I), MS_components.ms_mode.like(ms_mode_I), MS_components.ms_methodtype.like(ms_methodtype_I), MS_components.met_id.like(lc_elution.met_id), MS_components.ms_include).group_by( MS_components.component_name, MS_components.met_id, MS_components.met_name, MS_components.q1_mass, MS_components.q3_mass, MS_components.dp, MS_components.ep, MS_components.ce, 
MS_components.cxp, MS_components.precursor_formula, MS_components.product_formula, MS_components.quantifier, MS_components.ms_group, MS_components.threshold, MS_components.dwell_weight, lc_elution.rt, lc_elution.ms_window, lc_elution.rt_units, lc_elution.window_units).order_by( lc_elution.rt.asc(), MS_components.component_name.asc()).all(); mscomponents_O = []; if not mscomponents: print('bad query for row in ms_components: ') print('lc_method_I: ' + lc_method_I + ', ms_mode_I: ' + ms_mode_I + ', ms_methodtype_I: ' + ms_methodtype_I); exit(-1) for msc in mscomponents: mscomponents_1 = {}; mscomponents_1["q1_mass"] = msc.q1_mass; mscomponents_1["q3_mass"] = msc.q3_mass; mscomponents_1["met_name"] = msc.met_name; mscomponents_1["dp"] = msc.dp; mscomponents_1["ep"] = msc.ep; mscomponents_1["ce"] = msc.ce; mscomponents_1["cxp"] = msc.cxp; mscomponents_1["quantifier"] = msc.quantifier; mscomponents_1["met_id"] = msc.met_id; mscomponents_1["ms_group"] = msc.ms_group; mscomponents_1["threshold"] = msc.threshold; mscomponents_1["dwell_weight"] = msc.dwell_weight; mscomponents_1["component_name"] = msc.component_name; mscomponents_1["rt"] = msc.rt; mscomponents_1["ms_window"] = msc.ms_window; mscomponents_1["rt_units"] = msc.rt_units; mscomponents_1["window_units"] = msc.window_units; mscomponents_O.append(mscomponents_1); return mscomponents_O; except SQLAlchemyError as e: print(e); def delete_sample_experimentID_experiment(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(experiment).filter( experiment.id.like(d['experiment_id'])).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def delete_sample_experimentID_sample(self,dataListDelete_I): deletes = []; for d in dataListDelete_I: try: delete = self.session.query(sample).filter( experiment.id.like(d['experiment_id']), experiment.sample_name.like(sample.sample_name)).delete( synchronize_session=False); if delete == 0: print('row not found') print(d); deletes.append(delete); except SQLAlchemyError as e: print(e); self.session.commit(); def get_nMaxBioReps_experimentIDAndSampleName_sampleDescription(self,experiment_id_I,sample_name_I): try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample_name_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleID_sampleDescription(self,experiment_id_I,sample_id_I): try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_id_I), sample.sample_id.like(sample_description.sample_id), sample_description.sample_desc.like('Broth'), sample_description.istechnical != True).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates 
found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleNameAbbreviation_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I): try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.sample_desc.like('Broth') ).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_nMaxBioReps_experimentIDAndSampleNameAbbreviationAndExpType_sampleDescription(self,experiment_id_I,sample_name_abbreviation_I,exp_type_I): try: bioReps = self.session.query(sample_description.sample_replicate).filter( experiment.id.like(experiment_id_I), experiment.exp_type_id==exp_type_I, experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id), sample_description.sample_name_abbreviation.like(sample_name_abbreviation_I), sample_description.sample_desc.like('Broth') ).group_by( sample_description.sample_replicate).order_by( sample_description.sample_replicate.desc()).all(); maxBioReps_O = 0; if bioReps: maxBioReps_O = max(bioReps[0]); else: print('no biological replicates found for experiment ' + experiment_id_I); exit(-1); return maxBioReps_O; except SQLAlchemyError as e: print(e); def get_sampleIDs_experimentID_experiment(self,experiment_id_I): try: sample_names = self.session.query(sample.sample_id).filter( experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name)).group_by( sample.sample_id).order_by( sample.sample_id.asc()).all(); sample_names_O = []; for sn in sample_names: sample_names_O.append(sn.sample_id); return sample_names_O; except SQLAlchemyError as e: print(e); def get_sampleNameAbbreviation_experimentIDAndSampleID(self,experiment_id_I,sample_id_I): try: sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter( sample.sample_id.like(sample_id_I), experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id)).group_by( sample_description.sample_name_abbreviation).order_by( sample_description.sample_name_abbreviation.asc()).all(); sample_name_abbreviations_O = None; sample_name_abbreviations_O = sample_name_abbreviations[0][0]; return sample_name_abbreviations_O; except SQLAlchemyError as e: print(e); def get_sampleNameAbbreviation_experimentIDAndSampleName(self,experiment_id_I,sample_name_I): try: sample_name_abbreviations = self.session.query(sample_description.sample_name_abbreviation).filter( sample.sample_name.like(sample_name_I), experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_description.sample_id)).group_by( sample_description.sample_name_abbreviation).order_by( sample_description.sample_name_abbreviation.asc()).all(); sample_name_abbreviations_O = None; sample_name_abbreviations_O = sample_name_abbreviations[0][0]; return sample_name_abbreviations_O; except SQLAlchemyError as e: print(e); def 
get_sampleLabelAndBoxAndPos_experimentIDAndExperimentTypeID_sampleStorage(self,experiment_id_I,exp_type_id_I): try: data = self.session.query(sample_storage.sample_id, sample_storage.sample_label, sample_storage.box, sample_storage.pos).filter( experiment.exp_type_id == exp_type_id_I, experiment.id.like(experiment_id_I), experiment.sample_name.like(sample.sample_name), sample.sample_id.like(sample_storage.sample_id)).group_by( sample_storage.sample_id, sample_storage.sample_label, sample_storage.box, sample_storage.pos).order_by( sample_storage.sample_id.asc()).all(); sampleStorage_O = []; if data: for d in data: sampleStorage_O.append({'sample_id':d.sample_id, 'sample_label':d.sample_label, 'box':d.box, 'pos':d.pos}); return sampleStorage_O; except SQLAlchemyError as e: print(e);
true
true
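The delete_* helpers in the same record follow a second recurring shape: call Query.delete(synchronize_session=False) once per input dict, report and collect the per-call row counts, and commit once after the whole batch. Below is a minimal single-table sketch of that shape, assuming an illustrative Experiment model rather than the real sbaas tables (whose delete helpers also filter across sample and sample_description).

    # Minimal sketch of the delete-then-commit shape used by the delete_* helpers
    # in the record above (illustrative single-table version).
    from sqlalchemy import create_engine, Column, Integer, String
    from sqlalchemy.orm import declarative_base, sessionmaker

    Base = declarative_base()

    class Experiment(Base):
        __tablename__ = 'experiment'
        id = Column(Integer, primary_key=True)
        experiment_id = Column(String)

    engine = create_engine('sqlite:///:memory:')
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add(Experiment(experiment_id='exp01'))
    session.commit()

    def delete_experiments(session, rows_to_delete):
        """Delete one experiment per input dict; report rows that were not found."""
        deleted_counts = []
        for d in rows_to_delete:
            count = (session.query(Experiment)
                     .filter(Experiment.experiment_id.like(d['experiment_id']))
                     .delete(synchronize_session=False))
            if count == 0:
                print('row not found:', d)
            deleted_counts.append(count)
        session.commit()  # single commit after the batch, as in the record
        return deleted_counts

    print(delete_experiments(session, [{'experiment_id': 'exp01'}]))  # -> [1]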
f701454a3029889fb15ae188e49b88eafbc88eec
35,148
py
Python
localstack/utils/common.py
efritz/localstack
8163592b4de16508769030017d1022dbc1d89327
[ "Apache-2.0" ]
null
null
null
localstack/utils/common.py
efritz/localstack
8163592b4de16508769030017d1022dbc1d89327
[ "Apache-2.0" ]
null
null
null
localstack/utils/common.py
efritz/localstack
8163592b4de16508769030017d1022dbc1d89327
[ "Apache-2.0" ]
null
null
null
import io import os import re import pwd import grp import sys import json import uuid import time import glob import base64 import socket import hashlib import decimal import logging import tarfile import zipfile import binascii import calendar import tempfile import threading import subprocess import six import shutil import requests import dns.resolver import functools from io import BytesIO from contextlib import closing from datetime import datetime from six import with_metaclass from six.moves import cStringIO as StringIO from six.moves.urllib.parse import urlparse from multiprocessing.dummy import Pool from localstack import config from localstack.config import DEFAULT_ENCODING from localstack.constants import ENV_DEV from localstack.utils import bootstrap from localstack.utils.bootstrap import FuncThread # arrays for temporary files and resources TMP_FILES = [] TMP_THREADS = [] TMP_PROCESSES = [] # cache clean variables CACHE_CLEAN_TIMEOUT = 60 * 5 CACHE_MAX_AGE = 60 * 60 CACHE_FILE_PATTERN = os.path.join(tempfile.gettempdir(), '_random_dir_', 'cache.*.json') last_cache_clean_time = {'time': 0} mutex_clean = threading.Semaphore(1) # misc. constants TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S' TIMESTAMP_FORMAT_MILLIS = '%Y-%m-%dT%H:%M:%S.%fZ' CODEC_HANDLER_UNDERSCORE = 'underscore' # chunk size for file downloads DOWNLOAD_CHUNK_SIZE = 1024 * 1024 # set up logger LOG = logging.getLogger(__name__) # flag to indicate whether we've received and processed the stop signal INFRA_STOPPED = False # generic cache object CACHE = {} # lock for creating certificate files SSL_CERT_LOCK = threading.RLock() class CustomEncoder(json.JSONEncoder): """ Helper class to convert JSON documents with datetime, decimals, or bytes. """ def default(self, o): if isinstance(o, decimal.Decimal): if o % 1 > 0: return float(o) else: return int(o) if isinstance(o, datetime): return str(o) if isinstance(o, six.binary_type): return to_str(o) try: return super(CustomEncoder, self).default(o) except Exception: return None class ShellCommandThread(FuncThread): """ Helper class to run a shell command in a background thread. 
""" def __init__(self, cmd, params={}, outfile=None, env_vars={}, stdin=False, quiet=True, inherit_cwd=False, inherit_env=True): self.cmd = cmd self.process = None self.outfile = outfile or os.devnull self.stdin = stdin self.env_vars = env_vars self.inherit_cwd = inherit_cwd self.inherit_env = inherit_env FuncThread.__init__(self, self.run_cmd, params, quiet=quiet) def run_cmd(self, params): def convert_line(line): line = to_str(line or '') return '%s\r\n' % line.strip() def filter_line(line): """ Return True if this line should be filtered, i.e., not printed """ return '(Press CTRL+C to quit)' in line try: self.process = run(self.cmd, asynchronous=True, stdin=self.stdin, outfile=self.outfile, env_vars=self.env_vars, inherit_cwd=self.inherit_cwd, inherit_env=self.inherit_env) if self.outfile: if self.outfile == subprocess.PIPE: # get stdout/stderr from child process and write to parent output streams = ((self.process.stdout, sys.stdout), (self.process.stderr, sys.stderr)) for instream, outstream in streams: for line in iter(instream.readline, None): # `line` should contain a newline at the end as we're iterating, # hence we can safely break the loop if `line` is None or empty string if line in [None, '', b'']: break if not (line and line.strip()) and self.is_killed(): break line = convert_line(line) if filter_line(line): continue outstream.write(line) outstream.flush() self.process.wait() else: self.process.communicate() except Exception as e: if self.process and not self.quiet: LOG.warning('Shell command error "%s": %s' % (e, self.cmd)) if self.process and not self.quiet and self.process.returncode != 0: LOG.warning('Shell command exit code "%s": %s' % (self.process.returncode, self.cmd)) def is_killed(self): if not self.process: return True if INFRA_STOPPED: return True # Note: Do NOT import "psutil" at the root scope, as this leads # to problems when importing this file from our test Lambdas in Docker # (Error: libc.musl-x86_64.so.1: cannot open shared object file) import psutil return not psutil.pid_exists(self.process.pid) def stop(self, quiet=False): # Note: Do NOT import "psutil" at the root scope, as this leads # to problems when importing this file from our test Lambdas in Docker # (Error: libc.musl-x86_64.so.1: cannot open shared object file) import psutil if not self.process: LOG.warning("No process found for command '%s'" % self.cmd) return parent_pid = self.process.pid try: parent = psutil.Process(parent_pid) for child in parent.children(recursive=True): child.kill() parent.kill() self.process = None except Exception: if not quiet: LOG.warning('Unable to kill process with pid %s' % parent_pid) class JsonObject(object): """ Generic JSON serializable object for simplified subclassing """ def to_json(self, indent=None): return json.dumps(self, default=lambda o: ((float(o) if o % 1 > 0 else int(o)) if isinstance(o, decimal.Decimal) else o.__dict__), sort_keys=True, indent=indent) def apply_json(self, j): if isinstance(j, str): j = json.loads(j) self.__dict__.update(j) def to_dict(self): return json.loads(self.to_json()) @classmethod def from_json(cls, j): j = JsonObject.as_dict(j) result = cls() result.apply_json(j) return result @classmethod def from_json_list(cls, l): return [cls.from_json(j) for j in l] @classmethod def as_dict(cls, obj): if isinstance(obj, dict): return obj return obj.to_dict() def __str__(self): return self.to_json() def __repr__(self): return self.__str__() class CaptureOutput(object): """ A context manager that captures stdout/stderr of the current thread. 
Use it as follows: with CaptureOutput() as c: ... print(c.stdout(), c.stderr()) """ orig_stdout = sys.stdout orig_stderr = sys.stderr orig___stdout = sys.__stdout__ orig___stderr = sys.__stderr__ CONTEXTS_BY_THREAD = {} class LogStreamIO(io.StringIO): def write(self, s): if isinstance(s, str) and hasattr(s, 'decode'): s = s.decode('unicode-escape') return super(CaptureOutput.LogStreamIO, self).write(s) def __init__(self): self._stdout = self.LogStreamIO() self._stderr = self.LogStreamIO() def __enter__(self): # Note: import werkzeug here (not at top of file) to allow dependency pruning from werkzeug.local import LocalProxy ident = self._ident() if ident not in self.CONTEXTS_BY_THREAD: self.CONTEXTS_BY_THREAD[ident] = self self._set(LocalProxy(self._proxy(sys.stdout, 'stdout')), LocalProxy(self._proxy(sys.stderr, 'stderr')), LocalProxy(self._proxy(sys.__stdout__, 'stdout')), LocalProxy(self._proxy(sys.__stderr__, 'stderr'))) return self def __exit__(self, type, value, traceback): ident = self._ident() removed = self.CONTEXTS_BY_THREAD.pop(ident, None) if not self.CONTEXTS_BY_THREAD: # reset pointers self._set(self.orig_stdout, self.orig_stderr, self.orig___stdout, self.orig___stderr) # get value from streams removed._stdout.flush() removed._stderr.flush() out = removed._stdout.getvalue() err = removed._stderr.getvalue() # close handles removed._stdout.close() removed._stderr.close() removed._stdout = out removed._stderr = err def _set(self, out, err, __out, __err): sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__ = (out, err, __out, __err) def _proxy(self, original_stream, type): def proxy(): ident = self._ident() ctx = self.CONTEXTS_BY_THREAD.get(ident) if ctx: return ctx._stdout if type == 'stdout' else ctx._stderr return original_stream return proxy def _ident(self): return threading.currentThread().ident def stdout(self): return self._stdout.getvalue() if hasattr(self._stdout, 'getvalue') else self._stdout def stderr(self): return self._stderr.getvalue() if hasattr(self._stderr, 'getvalue') else self._stderr # ---------------- # UTILITY METHODS # ---------------- def synchronized(lock=None): """ Synchronization decorator as described in http://blog.dscpl.com.au/2014/01/the-missing-synchronized-decorator.html. 
""" def _decorator(wrapped): @functools.wraps(wrapped) def _wrapper(*args, **kwargs): with lock: return wrapped(*args, **kwargs) return _wrapper return _decorator def is_string(s, include_unicode=True, exclude_binary=False): if isinstance(s, six.binary_type) and exclude_binary: return False if isinstance(s, str): return True if include_unicode and isinstance(s, six.text_type): return True return False def is_string_or_bytes(s): return is_string(s) or isinstance(s, six.string_types) or isinstance(s, bytes) def is_base64(s): regex = r'^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$' return is_string(s) and re.match(regex, s) def md5(string): m = hashlib.md5() m.update(to_bytes(string)) return m.hexdigest() def in_docker(): return config.in_docker() def has_docker(): try: run('docker ps') return True except Exception: return False def get_docker_container_names(): return bootstrap.get_docker_container_names() def is_port_open(port_or_url, http_path=None, expect_success=True, protocols=['tcp']): port = port_or_url host = 'localhost' protocol = 'http' protocols = protocols if isinstance(protocols, list) else [protocols] if isinstance(port, six.string_types): url = urlparse(port_or_url) port = url.port host = url.hostname protocol = url.scheme nw_protocols = [] nw_protocols += ([socket.SOCK_STREAM] if 'tcp' in protocols else []) nw_protocols += ([socket.SOCK_DGRAM] if 'udp' in protocols else []) for nw_protocol in nw_protocols: with closing(socket.socket(socket.AF_INET, nw_protocol)) as sock: sock.settimeout(1) if nw_protocol == socket.SOCK_DGRAM: try: if port == 53: dnshost = '127.0.0.1' if host == 'localhost' else host resolver = dns.resolver.Resolver() resolver.nameservers = [dnshost] resolver.timeout = 1 resolver.lifetime = 1 answers = resolver.query('google.com', 'A') assert len(answers) > 0 else: sock.sendto(bytes(), (host, port)) sock.recvfrom(1024) except Exception: return False elif nw_protocol == socket.SOCK_STREAM: result = sock.connect_ex((host, port)) if result != 0: return False if 'tcp' not in protocols or not http_path: return True url = '%s://%s:%s%s' % (protocol, host, port, http_path) try: response = safe_requests.get(url) return not expect_success or response.status_code < 400 except Exception: return False def wait_for_port_open(port, http_path=None, expect_success=True, retries=10, sleep_time=0.5): """ Ping the given network port until it becomes available (for a given number of retries). If 'http_path' is set, make a GET request to this path and assert a non-error response. 
""" def check(): if not is_port_open(port, http_path=http_path, expect_success=expect_success): raise Exception() return retry(check, sleep=sleep_time, retries=retries) def get_free_tcp_port(): tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM) tcp.bind(('', 0)) addr, port = tcp.getsockname() tcp.close() return port def get_service_protocol(): return 'https' if config.USE_SSL else 'http' def timestamp(time=None, format=TIMESTAMP_FORMAT): if not time: time = datetime.utcnow() if isinstance(time, six.integer_types + (float, )): time = datetime.fromtimestamp(time) return time.strftime(format) def retry(function, retries=3, sleep=1, sleep_before=0, **kwargs): raise_error = None if sleep_before > 0: time.sleep(sleep_before) for i in range(0, retries + 1): try: return function(**kwargs) except Exception as error: raise_error = error time.sleep(sleep) raise raise_error def dump_thread_info(): for t in threading.enumerate(): print(t) print(run("ps aux | grep 'node\\|java\\|python'")) def merge_recursive(source, destination): for key, value in source.items(): if isinstance(value, dict): # get node or create one node = destination.setdefault(key, {}) merge_recursive(value, node) else: if not isinstance(destination, dict): LOG.warning('Destination for merging %s=%s is not dict: %s' % (key, value, destination)) destination[key] = value return destination def merge_dicts(*dicts, **kwargs): """ Merge all dicts in `*dicts` into a single dict, and return the result. If any of the entries in `*dicts` is None, and `default` is specified as keyword argument, then return `default`. """ result = {} for d in dicts: if d is None and 'default' in kwargs: return kwargs['default'] if d: result.update(d) return result def recurse_object(obj, func, path=''): """ Recursively apply `func` to `obj` (may be a list, dict, or other object). """ obj = func(obj, path=path) if isinstance(obj, list): for i in range(len(obj)): tmp_path = '%s[%s]' % (path or '.', i) obj[i] = recurse_object(obj[i], func, tmp_path) elif isinstance(obj, dict): for k, v in obj.items(): tmp_path = '%s%s' % ((path + '.') if path else '', k) obj[k] = recurse_object(v, func, tmp_path) return obj def base64_to_hex(b64_string): return binascii.hexlify(base64.b64decode(b64_string)) def obj_to_xml(obj): """ Return an XML representation of the given object (dict, list, or primitive). Does NOT add a common root element if the given obj is a list. Does NOT work for nested dict structures. """ if isinstance(obj, list): return ''.join([obj_to_xml(o) for o in obj]) if isinstance(obj, dict): return ''.join(['<{k}>{v}</{k}>'.format(k=k, v=obj_to_xml(v)) for (k, v) in obj.items()]) return str(obj) def now_utc(): return mktime(datetime.utcnow()) def now(): return mktime(datetime.now()) def mktime(timestamp): return calendar.timegm(timestamp.timetuple()) def mkdir(folder): if not os.path.exists(folder): try: os.makedirs(folder) except OSError as err: # Ignore rare 'File exists' race conditions. 
if err.errno != 17: raise def ensure_readable(file_path, default_perms=None): if default_perms is None: default_perms = 0o644 try: with open(file_path, 'rb'): pass except Exception: LOG.info('Updating permissions as file is currently not readable: %s' % file_path) os.chmod(file_path, default_perms) def chown_r(path, user): """ Recursive chown """ uid = pwd.getpwnam(user).pw_uid gid = grp.getgrnam(user).gr_gid os.chown(path, uid, gid) for root, dirs, files in os.walk(path): for dirname in dirs: os.chown(os.path.join(root, dirname), uid, gid) for filename in files: os.chown(os.path.join(root, filename), uid, gid) def chmod_r(path, mode): """ Recursive chmod """ os.chmod(path, mode) for root, dirnames, filenames in os.walk(path): for dirname in dirnames: os.chmod(os.path.join(root, dirname), mode) for filename in filenames: os.chmod(os.path.join(root, filename), mode) def rm_rf(path): """ Recursively removes a file or directory """ if not path or not os.path.exists(path): return # Running the native command can be an order of magnitude faster in Alpine on Travis-CI if is_alpine(): try: return run('rm -rf "%s"' % path) except Exception: pass # Make sure all files are writeable and dirs executable to remove chmod_r(path, 0o777) # check if the file is either a normal file, or, e.g., a fifo exists_but_non_dir = os.path.exists(path) and not os.path.isdir(path) if os.path.isfile(path) or exists_but_non_dir: os.remove(path) else: shutil.rmtree(path) def cp_r(src, dst): """Recursively copies file/directory""" if os.path.isfile(src): shutil.copy(src, dst) else: shutil.copytree(src, dst) def download(url, path, verify_ssl=True): """Downloads file at url to the given path""" # make sure we're creating a new session here to # enable parallel file downloads during installation! s = requests.Session() r = s.get(url, stream=True, verify=verify_ssl) # check status code before attempting to read body if r.status_code >= 400: raise Exception('Failed to download %s, response code %s' % (url, r.status_code)) total = 0 try: if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) LOG.debug('Starting download from %s to %s (%s bytes)' % (url, path, r.headers.get('content-length'))) with open(path, 'wb') as f: for chunk in r.iter_content(DOWNLOAD_CHUNK_SIZE): total += len(chunk) if chunk: # filter out keep-alive new chunks f.write(chunk) LOG.debug('Writing %s bytes (total %s) to %s' % (len(chunk), total, path)) else: LOG.debug('Empty chunk %s (total %s) from %s' % (chunk, total, url)) f.flush() os.fsync(f) if os.path.getsize(path) == 0: LOG.warning('Zero bytes downloaded from %s, retrying' % url) download(url, path, verify_ssl) return LOG.debug('Done downloading %s, response code %s, total bytes %d' % (url, r.status_code, total)) finally: LOG.debug('Cleaning up file handles for download of %s' % url) r.close() s.close() def parse_chunked_data(data): """ Parse the body of an HTTP message transmitted with chunked transfer encoding. 
""" data = (data or '').strip() chunks = [] while data: length = re.match(r'^([0-9a-zA-Z]+)\r\n.*', data) if not length: break length = length.group(1).lower() length = int(length, 16) data = data.partition('\r\n')[2] chunks.append(data[:length]) data = data[length:].strip() return ''.join(chunks) def is_number(s): try: float(s) # for int, long and float return True except (TypeError, ValueError): return False def is_mac_os(): return bootstrap.is_mac_os() def is_linux(): return bootstrap.is_linux() def is_alpine(): try: if '_is_alpine_' not in CACHE: CACHE['_is_alpine_'] = False if not os.path.exists('/etc/issue'): return False out = to_str(subprocess.check_output('cat /etc/issue', shell=True)) CACHE['_is_alpine_'] = 'Alpine' in out except subprocess.CalledProcessError: return False return CACHE['_is_alpine_'] def get_arch(): if is_mac_os(): return 'osx' if is_alpine(): return 'alpine' if is_linux(): return 'linux' raise Exception('Unable to determine system architecture') def short_uid(): return str(uuid.uuid4())[0:8] def json_safe(item): """ return a copy of the given object (e.g., dict) that is safe for JSON dumping """ try: return json.loads(json.dumps(item, cls=CustomEncoder)) except Exception: item = fix_json_keys(item) return json.loads(json.dumps(item, cls=CustomEncoder)) def fix_json_keys(item): """ make sure the keys of a JSON are strings (not binary type or other) """ item_copy = item if isinstance(item, list): item_copy = [] for i in item: item_copy.append(fix_json_keys(i)) if isinstance(item, dict): item_copy = {} for k, v in item.items(): item_copy[to_str(k)] = fix_json_keys(v) return item_copy def save_file(file, content, append=False): mode = 'a' if append else 'w+' if not isinstance(content, six.string_types): mode = mode + 'b' with open(file, mode) as f: f.write(content) f.flush() def load_file(file_path, default=None, mode=None): if not os.path.isfile(file_path): return default if not mode: mode = 'r' with open(file_path, mode) as f: result = f.read() return result def to_str(obj, encoding=DEFAULT_ENCODING, errors='strict'): """ If ``obj`` is an instance of ``binary_type``, return ``obj.decode(encoding, errors)``, otherwise return ``obj`` """ return obj.decode(encoding, errors) if isinstance(obj, six.binary_type) else obj def to_bytes(obj, encoding=DEFAULT_ENCODING, errors='strict'): """ If ``obj`` is an instance of ``text_type``, return ``obj.encode(encoding, errors)``, otherwise return ``obj`` """ return obj.encode(encoding, errors) if isinstance(obj, six.text_type) else obj def cleanup(files=True, env=ENV_DEV, quiet=True): if files: cleanup_tmp_files() def cleanup_threads_and_processes(quiet=True): for t in TMP_THREADS: t.stop(quiet=quiet) for p in TMP_PROCESSES: try: p.terminate() except Exception as e: print(e) # clear lists clear_list(TMP_THREADS) clear_list(TMP_PROCESSES) def clear_list(l): while len(l): del l[0] def cleanup_tmp_files(): for tmp in TMP_FILES: try: rm_rf(tmp) except Exception: pass # file likely doesn't exist, or permission denied del TMP_FILES[:] def new_tmp_file(): """ Return a path to a new temporary file. 
""" tmp_file, tmp_path = tempfile.mkstemp() os.close(tmp_file) TMP_FILES.append(tmp_path) return tmp_path def new_tmp_dir(): folder = new_tmp_file() rm_rf(folder) mkdir(folder) return folder def is_ip_address(addr): try: socket.inet_aton(addr) return True except socket.error: return False def is_zip_file(content): stream = BytesIO(content) return zipfile.is_zipfile(stream) def unzip(path, target_dir, overwrite=True): if is_alpine(): # Running the native command can be an order of magnitude faster in Alpine on Travis-CI flags = '-o' if overwrite else '' return run('cd %s; unzip %s %s' % (target_dir, flags, path)) try: zip_ref = zipfile.ZipFile(path, 'r') except Exception as e: LOG.warning('Unable to open zip file: %s: %s' % (path, e)) raise e # Make sure to preserve file permissions in the zip file # https://www.burgundywall.com/post/preserving-file-perms-with-python-zipfile-module try: for file_entry in zip_ref.infolist(): _unzip_file_entry(zip_ref, file_entry, target_dir) finally: zip_ref.close() def _unzip_file_entry(zip_ref, file_entry, target_dir): """ Extracts a Zipfile entry and preserves permissions """ zip_ref.extract(file_entry.filename, path=target_dir) out_path = os.path.join(target_dir, file_entry.filename) perm = file_entry.external_attr >> 16 os.chmod(out_path, perm or 0o777) def untar(path, target_dir): mode = 'r:gz' if path.endswith('gz') else 'r' with tarfile.open(path, mode) as tar: tar.extractall(path=target_dir) def zip_contains_jar_entries(content, jar_path_prefix=None, match_single_jar=True): try: with tempfile.NamedTemporaryFile() as tf: tf.write(content) tf.flush() with zipfile.ZipFile(tf.name, 'r') as zf: jar_entries = [e for e in zf.infolist() if e.filename.lower().endswith('.jar')] if match_single_jar and len(jar_entries) == 1 and len(zf.infolist()) == 1: return True matching_prefix = [e for e in jar_entries if not jar_path_prefix or e.filename.lower().startswith(jar_path_prefix)] return len(matching_prefix) > 0 except Exception: return False def is_jar_archive(content): """ Determine whether `content` contains valid zip bytes representing a JAR archive that contains at least one *.class file and a META-INF/MANIFEST.MF file. """ try: with tempfile.NamedTemporaryFile() as tf: tf.write(content) tf.flush() with zipfile.ZipFile(tf.name, 'r') as zf: class_files = [e for e in zf.infolist() if e.filename.endswith('.class')] manifest_file = [e for e in zf.infolist() if e.filename.upper() == 'META-INF/MANIFEST.MF'] if not class_files or not manifest_file: return False except Exception: return False return True def is_root(): out = run('whoami').strip() return out == 'root' def cleanup_resources(): cleanup_tmp_files() cleanup_threads_and_processes() @synchronized(lock=SSL_CERT_LOCK) def generate_ssl_cert(target_file=None, overwrite=False, random=False, return_content=False, serial_number=None): # Note: Do NOT import "OpenSSL" at the root scope # (Our test Lambdas are importing this file but don't have the module installed) from OpenSSL import crypto def all_exist(*files): return all([os.path.exists(f) for f in files]) if target_file and not overwrite and os.path.exists(target_file): key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file if all_exist(key_file_name, cert_file_name): return target_file, cert_file_name, key_file_name if random and target_file: if '.' in target_file: target_file = target_file.replace('.', '.%s.' 
% short_uid(), 1) else: target_file = '%s.%s' % (target_file, short_uid()) # create a key pair k = crypto.PKey() k.generate_key(crypto.TYPE_RSA, 2048) # create a self-signed cert cert = crypto.X509() subj = cert.get_subject() subj.C = 'AU' subj.ST = 'Some-State' subj.L = 'Some-Locality' subj.O = 'LocalStack Org' # noqa subj.OU = 'Testing' subj.CN = 'localhost' # Note: new requirements for recent OSX versions: https://support.apple.com/en-us/HT210176 # More details: https://www.iol.unh.edu/blog/2019/10/10/macos-catalina-and-chrome-trust serial_number = serial_number or 1001 cert.set_version(2) cert.set_serial_number(serial_number) cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(2 * 365 * 24 * 60 * 60) cert.set_issuer(cert.get_subject()) cert.set_pubkey(k) alt_names = b'DNS:localhost,DNS:test.localhost.atlassian.io,IP:127.0.0.1' cert.add_extensions([ crypto.X509Extension(b'subjectAltName', False, alt_names), crypto.X509Extension(b'basicConstraints', True, b'CA:false'), crypto.X509Extension(b'keyUsage', True, b'nonRepudiation,digitalSignature,keyEncipherment'), crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth') ]) cert.sign(k, 'SHA256') cert_file = StringIO() key_file = StringIO() cert_file.write(to_str(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))) key_file.write(to_str(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))) cert_file_content = cert_file.getvalue().strip() key_file_content = key_file.getvalue().strip() file_content = '%s\n%s' % (key_file_content, cert_file_content) if target_file: key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file # check existence to avoid permission denied issues: # https://github.com/localstack/localstack/issues/1607 if not all_exist(target_file, key_file_name, cert_file_name): for i in range(2): try: save_file(target_file, file_content) save_file(key_file_name, key_file_content) save_file(cert_file_name, cert_file_content) break except Exception as e: if i > 0: raise LOG.info('Unable to store certificate file under %s, using tmp file instead: %s' % (target_file, e)) # Fix for https://github.com/localstack/localstack/issues/1743 target_file = '%s.pem' % new_tmp_file() key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file TMP_FILES.append(target_file) TMP_FILES.append(key_file_name) TMP_FILES.append(cert_file_name) if not return_content: return target_file, cert_file_name, key_file_name return file_content def run_safe(_python_lambda, print_error=False, **kwargs): try: return _python_lambda(**kwargs) except Exception as e: if print_error: LOG.warning('Unable to execute function: %s' % e) def run_cmd_safe(**kwargs): return run_safe(run, print_error=False, **kwargs) def run(cmd, cache_duration_secs=0, **kwargs): def do_run(cmd): return bootstrap.run(cmd, **kwargs) if cache_duration_secs <= 0: return do_run(cmd) hash = md5(cmd) cache_file = CACHE_FILE_PATTERN.replace('*', hash) mkdir(os.path.dirname(CACHE_FILE_PATTERN)) if os.path.isfile(cache_file): # check file age mod_time = os.path.getmtime(cache_file) time_now = now() if mod_time > (time_now - cache_duration_secs): f = open(cache_file) result = f.read() f.close() return result result = do_run(cmd) f = open(cache_file, 'w+') f.write(result) f.close() clean_cache() return result def clone(item): return json.loads(json.dumps(item)) def clone_safe(item): return clone(json_safe(item)) def remove_non_ascii(text): # text = unicode(text, "utf-8") text = text.decode('utf-8', CODEC_HANDLER_UNDERSCORE) # text = unicodedata.normalize('NFKD', text) 
text = text.encode('ascii', CODEC_HANDLER_UNDERSCORE) return text class NetrcBypassAuth(requests.auth.AuthBase): def __call__(self, r): return r class _RequestsSafe(type): """ Wrapper around requests library, which can prevent it from verifying SSL certificates or reading credentials from ~/.netrc file """ verify_ssl = True def __getattr__(self, name): method = requests.__dict__.get(name.lower()) if not method: return method def _wrapper(*args, **kwargs): if 'auth' not in kwargs: kwargs['auth'] = NetrcBypassAuth() if not self.verify_ssl and args[0].startswith('https://') and 'verify' not in kwargs: kwargs['verify'] = False return method(*args, **kwargs) return _wrapper # create class-of-a-class class safe_requests(with_metaclass(_RequestsSafe)): pass def make_http_request(url, data=None, headers=None, method='GET'): if is_string(method): method = requests.__dict__[method.lower()] return method(url, headers=headers, data=data, auth=NetrcBypassAuth(), verify=False) class SafeStringIO(io.StringIO): """ Safe StringIO implementation that doesn't fail if str is passed in Python 2. """ def write(self, obj): if six.PY2 and isinstance(obj, str): obj = obj.decode('unicode-escape') return super(SafeStringIO, self).write(obj) def clean_cache(file_pattern=CACHE_FILE_PATTERN, last_clean_time=last_cache_clean_time, max_age=CACHE_MAX_AGE): mutex_clean.acquire() time_now = now() try: if last_clean_time['time'] > time_now - CACHE_CLEAN_TIMEOUT: return for cache_file in set(glob.glob(file_pattern)): mod_time = os.path.getmtime(cache_file) if time_now > mod_time + max_age: rm_rf(cache_file) last_clean_time['time'] = time_now finally: mutex_clean.release() return time_now def truncate(data, max_length=100): return (data[:max_length] + '...') if len(data) > max_length else data def parallelize(func, list, size=None): if not size: size = len(list) if size <= 0: return None pool = Pool(size) result = pool.map(func, list) pool.close() pool.join() return result def isoformat_milliseconds(t): try: return t.isoformat(timespec='milliseconds') except TypeError: return t.isoformat()[:-3] # Code that requires util functions from above CACHE_FILE_PATTERN = CACHE_FILE_PATTERN.replace('_random_dir_', short_uid())
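The _RequestsSafe metaclass above routes attribute access on the safe_requests class through __getattr__, wrapping the matching top-level requests function so that ~/.netrc credentials are bypassed and SSL verification can be switched off. A minimal self-contained sketch of that dispatch pattern, using Python 3 metaclass syntax; the class names, URL, and helper names here are illustrative and not part of localstack:

import requests
from requests.auth import AuthBase


class NoNetrcAuth(AuthBase):
    # Auth hook that returns the request untouched, bypassing ~/.netrc lookup.
    def __call__(self, r):
        return r


class SafeRequestsMeta(type):
    verify_ssl = True

    def __getattr__(cls, name):
        # Resolve to the matching top-level requests function (get, post, ...).
        method = getattr(requests, name.lower(), None)
        if not method:
            return method

        def _wrapper(*args, **kwargs):
            kwargs.setdefault('auth', NoNetrcAuth())
            if not cls.verify_ssl and args and args[0].startswith('https://'):
                kwargs.setdefault('verify', False)
            return method(*args, **kwargs)
        return _wrapper


class safe_http(metaclass=SafeRequestsMeta):
    pass


# Usage (network call commented out): safe_http.get resolves to requests.get
# with the netrc bypass applied.
# response = safe_http.get('https://localhost:4566/health')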
31.750678
120
0.61682
import io import os import re import pwd import grp import sys import json import uuid import time import glob import base64 import socket import hashlib import decimal import logging import tarfile import zipfile import binascii import calendar import tempfile import threading import subprocess import six import shutil import requests import dns.resolver import functools from io import BytesIO from contextlib import closing from datetime import datetime from six import with_metaclass from six.moves import cStringIO as StringIO from six.moves.urllib.parse import urlparse from multiprocessing.dummy import Pool from localstack import config from localstack.config import DEFAULT_ENCODING from localstack.constants import ENV_DEV from localstack.utils import bootstrap from localstack.utils.bootstrap import FuncThread TMP_FILES = [] TMP_THREADS = [] TMP_PROCESSES = [] CACHE_CLEAN_TIMEOUT = 60 * 5 CACHE_MAX_AGE = 60 * 60 CACHE_FILE_PATTERN = os.path.join(tempfile.gettempdir(), '_random_dir_', 'cache.*.json') last_cache_clean_time = {'time': 0} mutex_clean = threading.Semaphore(1) TIMESTAMP_FORMAT = '%Y-%m-%dT%H:%M:%S' TIMESTAMP_FORMAT_MILLIS = '%Y-%m-%dT%H:%M:%S.%fZ' CODEC_HANDLER_UNDERSCORE = 'underscore' DOWNLOAD_CHUNK_SIZE = 1024 * 1024 LOG = logging.getLogger(__name__) INFRA_STOPPED = False # generic cache object CACHE = {} # lock for creating certificate files SSL_CERT_LOCK = threading.RLock() class CustomEncoder(json.JSONEncoder): def default(self, o): if isinstance(o, decimal.Decimal): if o % 1 > 0: return float(o) else: return int(o) if isinstance(o, datetime): return str(o) if isinstance(o, six.binary_type): return to_str(o) try: return super(CustomEncoder, self).default(o) except Exception: return None class ShellCommandThread(FuncThread): def __init__(self, cmd, params={}, outfile=None, env_vars={}, stdin=False, quiet=True, inherit_cwd=False, inherit_env=True): self.cmd = cmd self.process = None self.outfile = outfile or os.devnull self.stdin = stdin self.env_vars = env_vars self.inherit_cwd = inherit_cwd self.inherit_env = inherit_env FuncThread.__init__(self, self.run_cmd, params, quiet=quiet) def run_cmd(self, params): def convert_line(line): line = to_str(line or '') return '%s\r\n' % line.strip() def filter_line(line): return '(Press CTRL+C to quit)' in line try: self.process = run(self.cmd, asynchronous=True, stdin=self.stdin, outfile=self.outfile, env_vars=self.env_vars, inherit_cwd=self.inherit_cwd, inherit_env=self.inherit_env) if self.outfile: if self.outfile == subprocess.PIPE: # get stdout/stderr from child process and write to parent output streams = ((self.process.stdout, sys.stdout), (self.process.stderr, sys.stderr)) for instream, outstream in streams: for line in iter(instream.readline, None): # `line` should contain a newline at the end as we're iterating, if line in [None, '', b'']: break if not (line and line.strip()) and self.is_killed(): break line = convert_line(line) if filter_line(line): continue outstream.write(line) outstream.flush() self.process.wait() else: self.process.communicate() except Exception as e: if self.process and not self.quiet: LOG.warning('Shell command error "%s": %s' % (e, self.cmd)) if self.process and not self.quiet and self.process.returncode != 0: LOG.warning('Shell command exit code "%s": %s' % (self.process.returncode, self.cmd)) def is_killed(self): if not self.process: return True if INFRA_STOPPED: return True import psutil return not psutil.pid_exists(self.process.pid) def stop(self, quiet=False): import psutil if not self.process: 
LOG.warning("No process found for command '%s'" % self.cmd) return parent_pid = self.process.pid try: parent = psutil.Process(parent_pid) for child in parent.children(recursive=True): child.kill() parent.kill() self.process = None except Exception: if not quiet: LOG.warning('Unable to kill process with pid %s' % parent_pid) class JsonObject(object): def to_json(self, indent=None): return json.dumps(self, default=lambda o: ((float(o) if o % 1 > 0 else int(o)) if isinstance(o, decimal.Decimal) else o.__dict__), sort_keys=True, indent=indent) def apply_json(self, j): if isinstance(j, str): j = json.loads(j) self.__dict__.update(j) def to_dict(self): return json.loads(self.to_json()) @classmethod def from_json(cls, j): j = JsonObject.as_dict(j) result = cls() result.apply_json(j) return result @classmethod def from_json_list(cls, l): return [cls.from_json(j) for j in l] @classmethod def as_dict(cls, obj): if isinstance(obj, dict): return obj return obj.to_dict() def __str__(self): return self.to_json() def __repr__(self): return self.__str__() class CaptureOutput(object): orig_stdout = sys.stdout orig_stderr = sys.stderr orig___stdout = sys.__stdout__ orig___stderr = sys.__stderr__ CONTEXTS_BY_THREAD = {} class LogStreamIO(io.StringIO): def write(self, s): if isinstance(s, str) and hasattr(s, 'decode'): s = s.decode('unicode-escape') return super(CaptureOutput.LogStreamIO, self).write(s) def __init__(self): self._stdout = self.LogStreamIO() self._stderr = self.LogStreamIO() def __enter__(self): from werkzeug.local import LocalProxy ident = self._ident() if ident not in self.CONTEXTS_BY_THREAD: self.CONTEXTS_BY_THREAD[ident] = self self._set(LocalProxy(self._proxy(sys.stdout, 'stdout')), LocalProxy(self._proxy(sys.stderr, 'stderr')), LocalProxy(self._proxy(sys.__stdout__, 'stdout')), LocalProxy(self._proxy(sys.__stderr__, 'stderr'))) return self def __exit__(self, type, value, traceback): ident = self._ident() removed = self.CONTEXTS_BY_THREAD.pop(ident, None) if not self.CONTEXTS_BY_THREAD: self._set(self.orig_stdout, self.orig_stderr, self.orig___stdout, self.orig___stderr) removed._stdout.flush() removed._stderr.flush() out = removed._stdout.getvalue() err = removed._stderr.getvalue() removed._stdout.close() removed._stderr.close() removed._stdout = out removed._stderr = err def _set(self, out, err, __out, __err): sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__ = (out, err, __out, __err) def _proxy(self, original_stream, type): def proxy(): ident = self._ident() ctx = self.CONTEXTS_BY_THREAD.get(ident) if ctx: return ctx._stdout if type == 'stdout' else ctx._stderr return original_stream return proxy def _ident(self): return threading.currentThread().ident def stdout(self): return self._stdout.getvalue() if hasattr(self._stdout, 'getvalue') else self._stdout def stderr(self): return self._stderr.getvalue() if hasattr(self._stderr, 'getvalue') else self._stderr def synchronized(lock=None): def _decorator(wrapped): @functools.wraps(wrapped) def _wrapper(*args, **kwargs): with lock: return wrapped(*args, **kwargs) return _wrapper return _decorator def is_string(s, include_unicode=True, exclude_binary=False): if isinstance(s, six.binary_type) and exclude_binary: return False if isinstance(s, str): return True if include_unicode and isinstance(s, six.text_type): return True return False def is_string_or_bytes(s): return is_string(s) or isinstance(s, six.string_types) or isinstance(s, bytes) def is_base64(s): regex = r'^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$' return 
is_string(s) and re.match(regex, s) def md5(string): m = hashlib.md5() m.update(to_bytes(string)) return m.hexdigest() def in_docker(): return config.in_docker() def has_docker(): try: run('docker ps') return True except Exception: return False def get_docker_container_names(): return bootstrap.get_docker_container_names() def is_port_open(port_or_url, http_path=None, expect_success=True, protocols=['tcp']): port = port_or_url host = 'localhost' protocol = 'http' protocols = protocols if isinstance(protocols, list) else [protocols] if isinstance(port, six.string_types): url = urlparse(port_or_url) port = url.port host = url.hostname protocol = url.scheme nw_protocols = [] nw_protocols += ([socket.SOCK_STREAM] if 'tcp' in protocols else []) nw_protocols += ([socket.SOCK_DGRAM] if 'udp' in protocols else []) for nw_protocol in nw_protocols: with closing(socket.socket(socket.AF_INET, nw_protocol)) as sock: sock.settimeout(1) if nw_protocol == socket.SOCK_DGRAM: try: if port == 53: dnshost = '127.0.0.1' if host == 'localhost' else host resolver = dns.resolver.Resolver() resolver.nameservers = [dnshost] resolver.timeout = 1 resolver.lifetime = 1 answers = resolver.query('google.com', 'A') assert len(answers) > 0 else: sock.sendto(bytes(), (host, port)) sock.recvfrom(1024) except Exception: return False elif nw_protocol == socket.SOCK_STREAM: result = sock.connect_ex((host, port)) if result != 0: return False if 'tcp' not in protocols or not http_path: return True url = '%s://%s:%s%s' % (protocol, host, port, http_path) try: response = safe_requests.get(url) return not expect_success or response.status_code < 400 except Exception: return False def wait_for_port_open(port, http_path=None, expect_success=True, retries=10, sleep_time=0.5): def check(): if not is_port_open(port, http_path=http_path, expect_success=expect_success): raise Exception() return retry(check, sleep=sleep_time, retries=retries) def get_free_tcp_port(): tcp = socket.socket(socket.AF_INET, socket.SOCK_STREAM) tcp.bind(('', 0)) addr, port = tcp.getsockname() tcp.close() return port def get_service_protocol(): return 'https' if config.USE_SSL else 'http' def timestamp(time=None, format=TIMESTAMP_FORMAT): if not time: time = datetime.utcnow() if isinstance(time, six.integer_types + (float, )): time = datetime.fromtimestamp(time) return time.strftime(format) def retry(function, retries=3, sleep=1, sleep_before=0, **kwargs): raise_error = None if sleep_before > 0: time.sleep(sleep_before) for i in range(0, retries + 1): try: return function(**kwargs) except Exception as error: raise_error = error time.sleep(sleep) raise raise_error def dump_thread_info(): for t in threading.enumerate(): print(t) print(run("ps aux | grep 'node\\|java\\|python'")) def merge_recursive(source, destination): for key, value in source.items(): if isinstance(value, dict): node = destination.setdefault(key, {}) merge_recursive(value, node) else: if not isinstance(destination, dict): LOG.warning('Destination for merging %s=%s is not dict: %s' % (key, value, destination)) destination[key] = value return destination def merge_dicts(*dicts, **kwargs): result = {} for d in dicts: if d is None and 'default' in kwargs: return kwargs['default'] if d: result.update(d) return result def recurse_object(obj, func, path=''): obj = func(obj, path=path) if isinstance(obj, list): for i in range(len(obj)): tmp_path = '%s[%s]' % (path or '.', i) obj[i] = recurse_object(obj[i], func, tmp_path) elif isinstance(obj, dict): for k, v in obj.items(): tmp_path = '%s%s' % ((path + 
'.') if path else '', k) obj[k] = recurse_object(v, func, tmp_path) return obj def base64_to_hex(b64_string): return binascii.hexlify(base64.b64decode(b64_string)) def obj_to_xml(obj): if isinstance(obj, list): return ''.join([obj_to_xml(o) for o in obj]) if isinstance(obj, dict): return ''.join(['<{k}>{v}</{k}>'.format(k=k, v=obj_to_xml(v)) for (k, v) in obj.items()]) return str(obj) def now_utc(): return mktime(datetime.utcnow()) def now(): return mktime(datetime.now()) def mktime(timestamp): return calendar.timegm(timestamp.timetuple()) def mkdir(folder): if not os.path.exists(folder): try: os.makedirs(folder) except OSError as err: if err.errno != 17: raise def ensure_readable(file_path, default_perms=None): if default_perms is None: default_perms = 0o644 try: with open(file_path, 'rb'): pass except Exception: LOG.info('Updating permissions as file is currently not readable: %s' % file_path) os.chmod(file_path, default_perms) def chown_r(path, user): uid = pwd.getpwnam(user).pw_uid gid = grp.getgrnam(user).gr_gid os.chown(path, uid, gid) for root, dirs, files in os.walk(path): for dirname in dirs: os.chown(os.path.join(root, dirname), uid, gid) for filename in files: os.chown(os.path.join(root, filename), uid, gid) def chmod_r(path, mode): os.chmod(path, mode) for root, dirnames, filenames in os.walk(path): for dirname in dirnames: os.chmod(os.path.join(root, dirname), mode) for filename in filenames: os.chmod(os.path.join(root, filename), mode) def rm_rf(path): if not path or not os.path.exists(path): return if is_alpine(): try: return run('rm -rf "%s"' % path) except Exception: pass chmod_r(path, 0o777) exists_but_non_dir = os.path.exists(path) and not os.path.isdir(path) if os.path.isfile(path) or exists_but_non_dir: os.remove(path) else: shutil.rmtree(path) def cp_r(src, dst): if os.path.isfile(src): shutil.copy(src, dst) else: shutil.copytree(src, dst) def download(url, path, verify_ssl=True): # enable parallel file downloads during installation! 
s = requests.Session() r = s.get(url, stream=True, verify=verify_ssl) # check status code before attempting to read body if r.status_code >= 400: raise Exception('Failed to download %s, response code %s' % (url, r.status_code)) total = 0 try: if not os.path.exists(os.path.dirname(path)): os.makedirs(os.path.dirname(path)) LOG.debug('Starting download from %s to %s (%s bytes)' % (url, path, r.headers.get('content-length'))) with open(path, 'wb') as f: for chunk in r.iter_content(DOWNLOAD_CHUNK_SIZE): total += len(chunk) if chunk: # filter out keep-alive new chunks f.write(chunk) LOG.debug('Writing %s bytes (total %s) to %s' % (len(chunk), total, path)) else: LOG.debug('Empty chunk %s (total %s) from %s' % (chunk, total, url)) f.flush() os.fsync(f) if os.path.getsize(path) == 0: LOG.warning('Zero bytes downloaded from %s, retrying' % url) download(url, path, verify_ssl) return LOG.debug('Done downloading %s, response code %s, total bytes %d' % (url, r.status_code, total)) finally: LOG.debug('Cleaning up file handles for download of %s' % url) r.close() s.close() def parse_chunked_data(data): data = (data or '').strip() chunks = [] while data: length = re.match(r'^([0-9a-zA-Z]+)\r\n.*', data) if not length: break length = length.group(1).lower() length = int(length, 16) data = data.partition('\r\n')[2] chunks.append(data[:length]) data = data[length:].strip() return ''.join(chunks) def is_number(s): try: float(s) # for int, long and float return True except (TypeError, ValueError): return False def is_mac_os(): return bootstrap.is_mac_os() def is_linux(): return bootstrap.is_linux() def is_alpine(): try: if '_is_alpine_' not in CACHE: CACHE['_is_alpine_'] = False if not os.path.exists('/etc/issue'): return False out = to_str(subprocess.check_output('cat /etc/issue', shell=True)) CACHE['_is_alpine_'] = 'Alpine' in out except subprocess.CalledProcessError: return False return CACHE['_is_alpine_'] def get_arch(): if is_mac_os(): return 'osx' if is_alpine(): return 'alpine' if is_linux(): return 'linux' raise Exception('Unable to determine system architecture') def short_uid(): return str(uuid.uuid4())[0:8] def json_safe(item): try: return json.loads(json.dumps(item, cls=CustomEncoder)) except Exception: item = fix_json_keys(item) return json.loads(json.dumps(item, cls=CustomEncoder)) def fix_json_keys(item): item_copy = item if isinstance(item, list): item_copy = [] for i in item: item_copy.append(fix_json_keys(i)) if isinstance(item, dict): item_copy = {} for k, v in item.items(): item_copy[to_str(k)] = fix_json_keys(v) return item_copy def save_file(file, content, append=False): mode = 'a' if append else 'w+' if not isinstance(content, six.string_types): mode = mode + 'b' with open(file, mode) as f: f.write(content) f.flush() def load_file(file_path, default=None, mode=None): if not os.path.isfile(file_path): return default if not mode: mode = 'r' with open(file_path, mode) as f: result = f.read() return result def to_str(obj, encoding=DEFAULT_ENCODING, errors='strict'): return obj.decode(encoding, errors) if isinstance(obj, six.binary_type) else obj def to_bytes(obj, encoding=DEFAULT_ENCODING, errors='strict'): return obj.encode(encoding, errors) if isinstance(obj, six.text_type) else obj def cleanup(files=True, env=ENV_DEV, quiet=True): if files: cleanup_tmp_files() def cleanup_threads_and_processes(quiet=True): for t in TMP_THREADS: t.stop(quiet=quiet) for p in TMP_PROCESSES: try: p.terminate() except Exception as e: print(e) # clear lists clear_list(TMP_THREADS) clear_list(TMP_PROCESSES) 
def clear_list(l): while len(l): del l[0] def cleanup_tmp_files(): for tmp in TMP_FILES: try: rm_rf(tmp) except Exception: pass # file likely doesn't exist, or permission denied del TMP_FILES[:] def new_tmp_file(): tmp_file, tmp_path = tempfile.mkstemp() os.close(tmp_file) TMP_FILES.append(tmp_path) return tmp_path def new_tmp_dir(): folder = new_tmp_file() rm_rf(folder) mkdir(folder) return folder def is_ip_address(addr): try: socket.inet_aton(addr) return True except socket.error: return False def is_zip_file(content): stream = BytesIO(content) return zipfile.is_zipfile(stream) def unzip(path, target_dir, overwrite=True): if is_alpine(): flags = '-o' if overwrite else '' return run('cd %s; unzip %s %s' % (target_dir, flags, path)) try: zip_ref = zipfile.ZipFile(path, 'r') except Exception as e: LOG.warning('Unable to open zip file: %s: %s' % (path, e)) raise e try: for file_entry in zip_ref.infolist(): _unzip_file_entry(zip_ref, file_entry, target_dir) finally: zip_ref.close() def _unzip_file_entry(zip_ref, file_entry, target_dir): zip_ref.extract(file_entry.filename, path=target_dir) out_path = os.path.join(target_dir, file_entry.filename) perm = file_entry.external_attr >> 16 os.chmod(out_path, perm or 0o777) def untar(path, target_dir): mode = 'r:gz' if path.endswith('gz') else 'r' with tarfile.open(path, mode) as tar: tar.extractall(path=target_dir) def zip_contains_jar_entries(content, jar_path_prefix=None, match_single_jar=True): try: with tempfile.NamedTemporaryFile() as tf: tf.write(content) tf.flush() with zipfile.ZipFile(tf.name, 'r') as zf: jar_entries = [e for e in zf.infolist() if e.filename.lower().endswith('.jar')] if match_single_jar and len(jar_entries) == 1 and len(zf.infolist()) == 1: return True matching_prefix = [e for e in jar_entries if not jar_path_prefix or e.filename.lower().startswith(jar_path_prefix)] return len(matching_prefix) > 0 except Exception: return False def is_jar_archive(content): try: with tempfile.NamedTemporaryFile() as tf: tf.write(content) tf.flush() with zipfile.ZipFile(tf.name, 'r') as zf: class_files = [e for e in zf.infolist() if e.filename.endswith('.class')] manifest_file = [e for e in zf.infolist() if e.filename.upper() == 'META-INF/MANIFEST.MF'] if not class_files or not manifest_file: return False except Exception: return False return True def is_root(): out = run('whoami').strip() return out == 'root' def cleanup_resources(): cleanup_tmp_files() cleanup_threads_and_processes() @synchronized(lock=SSL_CERT_LOCK) def generate_ssl_cert(target_file=None, overwrite=False, random=False, return_content=False, serial_number=None): from OpenSSL import crypto def all_exist(*files): return all([os.path.exists(f) for f in files]) if target_file and not overwrite and os.path.exists(target_file): key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file if all_exist(key_file_name, cert_file_name): return target_file, cert_file_name, key_file_name if random and target_file: if '.' in target_file: target_file = target_file.replace('.', '.%s.' 
% short_uid(), 1) else: target_file = '%s.%s' % (target_file, short_uid()) # create a key pair k = crypto.PKey() k.generate_key(crypto.TYPE_RSA, 2048) # create a self-signed cert cert = crypto.X509() subj = cert.get_subject() subj.C = 'AU' subj.ST = 'Some-State' subj.L = 'Some-Locality' subj.O = 'LocalStack Org' # noqa subj.OU = 'Testing' subj.CN = 'localhost' # Note: new requirements for recent OSX versions: https://support.apple.com/en-us/HT210176 # More details: https://www.iol.unh.edu/blog/2019/10/10/macos-catalina-and-chrome-trust serial_number = serial_number or 1001 cert.set_version(2) cert.set_serial_number(serial_number) cert.gmtime_adj_notBefore(0) cert.gmtime_adj_notAfter(2 * 365 * 24 * 60 * 60) cert.set_issuer(cert.get_subject()) cert.set_pubkey(k) alt_names = b'DNS:localhost,DNS:test.localhost.atlassian.io,IP:127.0.0.1' cert.add_extensions([ crypto.X509Extension(b'subjectAltName', False, alt_names), crypto.X509Extension(b'basicConstraints', True, b'CA:false'), crypto.X509Extension(b'keyUsage', True, b'nonRepudiation,digitalSignature,keyEncipherment'), crypto.X509Extension(b'extendedKeyUsage', True, b'serverAuth') ]) cert.sign(k, 'SHA256') cert_file = StringIO() key_file = StringIO() cert_file.write(to_str(crypto.dump_certificate(crypto.FILETYPE_PEM, cert))) key_file.write(to_str(crypto.dump_privatekey(crypto.FILETYPE_PEM, k))) cert_file_content = cert_file.getvalue().strip() key_file_content = key_file.getvalue().strip() file_content = '%s\n%s' % (key_file_content, cert_file_content) if target_file: key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file # check existence to avoid permission denied issues: # https://github.com/localstack/localstack/issues/1607 if not all_exist(target_file, key_file_name, cert_file_name): for i in range(2): try: save_file(target_file, file_content) save_file(key_file_name, key_file_content) save_file(cert_file_name, cert_file_content) break except Exception as e: if i > 0: raise LOG.info('Unable to store certificate file under %s, using tmp file instead: %s' % (target_file, e)) # Fix for https://github.com/localstack/localstack/issues/1743 target_file = '%s.pem' % new_tmp_file() key_file_name = '%s.key' % target_file cert_file_name = '%s.crt' % target_file TMP_FILES.append(target_file) TMP_FILES.append(key_file_name) TMP_FILES.append(cert_file_name) if not return_content: return target_file, cert_file_name, key_file_name return file_content def run_safe(_python_lambda, print_error=False, **kwargs): try: return _python_lambda(**kwargs) except Exception as e: if print_error: LOG.warning('Unable to execute function: %s' % e) def run_cmd_safe(**kwargs): return run_safe(run, print_error=False, **kwargs) def run(cmd, cache_duration_secs=0, **kwargs): def do_run(cmd): return bootstrap.run(cmd, **kwargs) if cache_duration_secs <= 0: return do_run(cmd) hash = md5(cmd) cache_file = CACHE_FILE_PATTERN.replace('*', hash) mkdir(os.path.dirname(CACHE_FILE_PATTERN)) if os.path.isfile(cache_file): # check file age mod_time = os.path.getmtime(cache_file) time_now = now() if mod_time > (time_now - cache_duration_secs): f = open(cache_file) result = f.read() f.close() return result result = do_run(cmd) f = open(cache_file, 'w+') f.write(result) f.close() clean_cache() return result def clone(item): return json.loads(json.dumps(item)) def clone_safe(item): return clone(json_safe(item)) def remove_non_ascii(text): # text = unicode(text, "utf-8") text = text.decode('utf-8', CODEC_HANDLER_UNDERSCORE) # text = unicodedata.normalize('NFKD', text) 
text = text.encode('ascii', CODEC_HANDLER_UNDERSCORE) return text class NetrcBypassAuth(requests.auth.AuthBase): def __call__(self, r): return r class _RequestsSafe(type): verify_ssl = True def __getattr__(self, name): method = requests.__dict__.get(name.lower()) if not method: return method def _wrapper(*args, **kwargs): if 'auth' not in kwargs: kwargs['auth'] = NetrcBypassAuth() if not self.verify_ssl and args[0].startswith('https://') and 'verify' not in kwargs: kwargs['verify'] = False return method(*args, **kwargs) return _wrapper # create class-of-a-class class safe_requests(with_metaclass(_RequestsSafe)): pass def make_http_request(url, data=None, headers=None, method='GET'): if is_string(method): method = requests.__dict__[method.lower()] return method(url, headers=headers, data=data, auth=NetrcBypassAuth(), verify=False) class SafeStringIO(io.StringIO): def write(self, obj): if six.PY2 and isinstance(obj, str): obj = obj.decode('unicode-escape') return super(SafeStringIO, self).write(obj) def clean_cache(file_pattern=CACHE_FILE_PATTERN, last_clean_time=last_cache_clean_time, max_age=CACHE_MAX_AGE): mutex_clean.acquire() time_now = now() try: if last_clean_time['time'] > time_now - CACHE_CLEAN_TIMEOUT: return for cache_file in set(glob.glob(file_pattern)): mod_time = os.path.getmtime(cache_file) if time_now > mod_time + max_age: rm_rf(cache_file) last_clean_time['time'] = time_now finally: mutex_clean.release() return time_now def truncate(data, max_length=100): return (data[:max_length] + '...') if len(data) > max_length else data def parallelize(func, list, size=None): if not size: size = len(list) if size <= 0: return None pool = Pool(size) result = pool.map(func, list) pool.close() pool.join() return result def isoformat_milliseconds(t): try: return t.isoformat(timespec='milliseconds') except TypeError: return t.isoformat()[:-3] # Code that requires util functions from above CACHE_FILE_PATTERN = CACHE_FILE_PATTERN.replace('_random_dir_', short_uid())
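The retry() helper above re-invokes a callable up to `retries` additional times, sleeping between attempts and re-raising the last error once the attempts are exhausted. A compact standard-library sketch of the same pattern; the helper name, defaults, and the commented usage are illustrative:

import time


def retry_call(func, retries=3, sleep=1.0, **kwargs):
    # Call func(**kwargs); on any exception, sleep and try again, keeping
    # the most recent error so it can be re-raised after the final attempt.
    last_error = None
    for _ in range(retries + 1):
        try:
            return func(**kwargs)
        except Exception as error:
            last_error = error
            time.sleep(sleep)
    raise last_error


# Example with a hypothetical health-check callable:
# retry_call(check_service_ready, retries=10, sleep=0.5)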
true
true
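run() above also implements a small on-disk cache for command output: results are keyed by the MD5 of the command string, written beneath a temp directory, served while the file is younger than cache_duration_secs, and eventually evicted by clean_cache(). A minimal standard-library sketch of that scheme; the directory name, TTL, and helper name are illustrative rather than the localstack defaults:

import hashlib
import os
import subprocess
import tempfile
import time

CACHE_DIR = os.path.join(tempfile.gettempdir(), 'cmd_cache')  # assumed location


def cached_run(cmd, ttl_secs=60):
    # Run a shell command, serving the cached result while it is still fresh.
    os.makedirs(CACHE_DIR, exist_ok=True)
    key = hashlib.md5(cmd.encode('utf-8')).hexdigest()
    cache_file = os.path.join(CACHE_DIR, 'cache.%s.out' % key)
    if os.path.isfile(cache_file) and os.path.getmtime(cache_file) > time.time() - ttl_secs:
        with open(cache_file) as f:
            return f.read()
    result = subprocess.check_output(cmd, shell=True).decode('utf-8')
    with open(cache_file, 'w') as f:
        f.write(result)
    return result


# Second call within the TTL is answered from disk:
# print(cached_run('echo hello')); print(cached_run('echo hello'))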
f70145979560947fad4d5d5d28ecfce2f2944135
36046
py
Python
pymongo/client_session.py
slatermethuen/mongo-python-driver
923e83cd1a04674c8e4f6b8fe178dcf84fbe5a7d
[ "Apache-2.0" ]
null
null
null
pymongo/client_session.py
slatermethuen/mongo-python-driver
923e83cd1a04674c8e4f6b8fe178dcf84fbe5a7d
[ "Apache-2.0" ]
null
null
null
pymongo/client_session.py
slatermethuen/mongo-python-driver
923e83cd1a04674c8e4f6b8fe178dcf84fbe5a7d
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 MongoDB, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Logical sessions for ordering sequential operations. Requires MongoDB 3.6. .. versionadded:: 3.6 Causally Consistent Reads ========================= .. code-block:: python with client.start_session(causal_consistency=True) as session: collection = client.db.collection collection.update_one({'_id': 1}, {'$set': {'x': 10}}, session=session) secondary_c = collection.with_options( read_preference=ReadPreference.SECONDARY) # A secondary read waits for replication of the write. secondary_c.find_one({'_id': 1}, session=session) If `causal_consistency` is True (the default), read operations that use the session are causally after previous read and write operations. Using a causally consistent session, an application can read its own writes and is guaranteed monotonic reads, even when reading from replica set secondaries. .. mongodoc:: causal-consistency .. _transactions-ref: Transactions ============ MongoDB 4.0 adds support for transactions on replica set primaries. A transaction is associated with a :class:`ClientSession`. To start a transaction on a session, use :meth:`ClientSession.start_transaction` in a with-statement. Then, execute an operation within the transaction by passing the session to the operation: .. code-block:: python orders = client.db.orders inventory = client.db.inventory with client.start_session() as session: with session.start_transaction(): orders.insert_one({"sku": "abc123", "qty": 100}, session=session) inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}}, {"$inc": {"qty": -100}}, session=session) Upon normal completion of ``with session.start_transaction()`` block, the transaction automatically calls :meth:`ClientSession.commit_transaction`. If the block exits with an exception, the transaction automatically calls :meth:`ClientSession.abort_transaction`. In general, multi-document transactions only support read/write (CRUD) operations on existing collections. However, MongoDB 4.4 adds support for creating collections and indexes with some limitations, including an insert operation that would result in the creation of a new collection. For a complete description of all the supported and unsupported operations see the `MongoDB server's documentation for transactions <http://dochub.mongodb.org/core/transactions>`_. A session may only have a single active transaction at a time, multiple transactions on the same session can be executed in sequence. .. versionadded:: 3.7 Sharded Transactions ^^^^^^^^^^^^^^^^^^^^ PyMongo 3.9 adds support for transactions on sharded clusters running MongoDB 4.2. Sharded transactions have the same API as replica set transactions. When running a transaction against a sharded cluster, the session is pinned to the mongos server selected for the first operation in the transaction. All subsequent operations that are part of the same transaction are routed to the same mongos server. 
When the transaction is completed, by running either commitTransaction or abortTransaction, the session is unpinned. .. versionadded:: 3.9 .. mongodoc:: transactions Classes ======= """ import collections import os import sys import uuid from bson.binary import Binary from bson.int64 import Int64 from bson.py3compat import abc, integer_types, reraise_instance from bson.son import SON from bson.timestamp import Timestamp from pymongo import monotonic from pymongo.errors import (ConfigurationError, ConnectionFailure, InvalidOperation, OperationFailure, PyMongoError, ServerSelectionTimeoutError, WTimeoutError) from pymongo.helpers import _RETRYABLE_ERROR_CODES from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.write_concern import WriteConcern class SessionOptions(object): """Options for a new :class:`ClientSession`. :Parameters: - `causal_consistency` (optional): If True (the default), read operations are causally ordered within the session. - `default_transaction_options` (optional): The default TransactionOptions to use for transactions started on this session. """ def __init__(self, causal_consistency=True, default_transaction_options=None): self._causal_consistency = causal_consistency if default_transaction_options is not None: if not isinstance(default_transaction_options, TransactionOptions): raise TypeError( "default_transaction_options must be an instance of " "pymongo.client_session.TransactionOptions, not: %r" % (default_transaction_options,)) self._default_transaction_options = default_transaction_options @property def causal_consistency(self): """Whether causal consistency is configured.""" return self._causal_consistency @property def default_transaction_options(self): """The default TransactionOptions to use for transactions started on this session. .. versionadded:: 3.7 """ return self._default_transaction_options class TransactionOptions(object): """Options for :meth:`ClientSession.start_transaction`. :Parameters: - `read_concern` (optional): The :class:`~pymongo.read_concern.ReadConcern` to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of the :class:`MongoClient` is used. - `write_concern` (optional): The :class:`~pymongo.write_concern.WriteConcern` to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of the :class:`MongoClient` is used. - `read_preference` (optional): The read preference to use. If ``None`` (the default) the :attr:`read_preference` of this :class:`MongoClient` is used. See :mod:`~pymongo.read_preferences` for options. Transactions which read must use :attr:`~pymongo.read_preferences.ReadPreference.PRIMARY`. - `max_commit_time_ms` (optional): The maximum amount of time to allow a single commitTransaction command to run. This option is an alias for maxTimeMS option on the commitTransaction command. If ``None`` (the default) maxTimeMS is not used. .. versionchanged:: 3.9 Added the ``max_commit_time_ms`` option. .. 
versionadded:: 3.7 """ def __init__(self, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): self._read_concern = read_concern self._write_concern = write_concern self._read_preference = read_preference self._max_commit_time_ms = max_commit_time_ms if read_concern is not None: if not isinstance(read_concern, ReadConcern): raise TypeError("read_concern must be an instance of " "pymongo.read_concern.ReadConcern, not: %r" % (read_concern,)) if write_concern is not None: if not isinstance(write_concern, WriteConcern): raise TypeError("write_concern must be an instance of " "pymongo.write_concern.WriteConcern, not: %r" % (write_concern,)) if not write_concern.acknowledged: raise ConfigurationError( "transactions do not support unacknowledged write concern" ": %r" % (write_concern,)) if read_preference is not None: if not isinstance(read_preference, _ServerMode): raise TypeError("%r is not valid for read_preference. See " "pymongo.read_preferences for valid " "options." % (read_preference,)) if max_commit_time_ms is not None: if not isinstance(max_commit_time_ms, integer_types): raise TypeError( "max_commit_time_ms must be an integer or None") @property def read_concern(self): """This transaction's :class:`~pymongo.read_concern.ReadConcern`.""" return self._read_concern @property def write_concern(self): """This transaction's :class:`~pymongo.write_concern.WriteConcern`.""" return self._write_concern @property def read_preference(self): """This transaction's :class:`~pymongo.read_preferences.ReadPreference`. """ return self._read_preference @property def max_commit_time_ms(self): """The maxTimeMS to use when running a commitTransaction command. .. versionadded:: 3.9 """ return self._max_commit_time_ms def _validate_session_write_concern(session, write_concern): """Validate that an explicit session is not used with an unack'ed write. Returns the session to use for the next operation. """ if session: if write_concern is not None and not write_concern.acknowledged: # For unacknowledged writes without an explicit session, # drivers SHOULD NOT use an implicit session. If a driver # creates an implicit session for unacknowledged writes # without an explicit session, the driver MUST NOT send the # session ID. 
if session._implicit: return None else: raise ConfigurationError( 'Explicit sessions are incompatible with ' 'unacknowledged write concern: %r' % ( write_concern,)) return session class _TransactionContext(object): """Internal transaction context manager for start_transaction.""" def __init__(self, session): self.__session = session def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if self.__session.in_transaction: if exc_val is None: self.__session.commit_transaction() else: self.__session.abort_transaction() class _TxnState(object): NONE = 1 STARTING = 2 IN_PROGRESS = 3 COMMITTED = 4 COMMITTED_EMPTY = 5 ABORTED = 6 class _Transaction(object): """Internal class to hold transaction information in a ClientSession.""" def __init__(self, opts): self.opts = opts self.state = _TxnState.NONE self.sharded = False self.pinned_address = None self.recovery_token = None def active(self): return self.state in (_TxnState.STARTING, _TxnState.IN_PROGRESS) def reset(self): self.state = _TxnState.NONE self.sharded = False self.pinned_address = None self.recovery_token = None def _reraise_with_unknown_commit(exc): """Re-raise an exception with the UnknownTransactionCommitResult label.""" exc._add_error_label("UnknownTransactionCommitResult") reraise_instance(exc, trace=sys.exc_info()[2]) def _max_time_expired_error(exc): """Return true if exc is a MaxTimeMSExpired error.""" return isinstance(exc, OperationFailure) and exc.code == 50 # From the transactions spec, all the retryable writes errors plus # WriteConcernFailed. _UNKNOWN_COMMIT_ERROR_CODES = _RETRYABLE_ERROR_CODES | frozenset([ 64, # WriteConcernFailed 50, # MaxTimeMSExpired ]) # From the Convenient API for Transactions spec, with_transaction must # halt retries after 120 seconds. # This limit is non-configurable and was chosen to be twice the 60 second # default value of MongoDB's `transactionLifetimeLimitSeconds` parameter. _WITH_TRANSACTION_RETRY_TIME_LIMIT = 120 def _within_time_limit(start_time): """Are we within the with_transaction retry limit?""" return monotonic.time() - start_time < _WITH_TRANSACTION_RETRY_TIME_LIMIT class ClientSession(object): """A session for ordering sequential operations.""" def __init__(self, client, server_session, options, authset, implicit): # A MongoClient, a _ServerSession, a SessionOptions, and a set. self._client = client self._server_session = server_session self._options = options self._authset = authset self._cluster_time = None self._operation_time = None # Is this an implicitly created session? self._implicit = implicit self._transaction = _Transaction(None) def end_session(self): """Finish this session. If a transaction has started, abort it. It is an error to use the session after the session has ended. """ self._end_session(lock=True) def _end_session(self, lock): if self._server_session is not None: try: if self.in_transaction: self.abort_transaction() finally: self._client._return_server_session(self._server_session, lock) self._server_session = None def _check_ended(self): if self._server_session is None: raise InvalidOperation("Cannot use ended session") def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self._end_session(lock=True) @property def client(self): """The :class:`~pymongo.mongo_client.MongoClient` this session was created from. 
""" return self._client @property def options(self): """The :class:`SessionOptions` this session was created with.""" return self._options @property def session_id(self): """A BSON document, the opaque server session identifier.""" self._check_ended() return self._server_session.session_id @property def cluster_time(self): """The cluster time returned by the last operation executed in this session. """ return self._cluster_time @property def operation_time(self): """The operation time returned by the last operation executed in this session. """ return self._operation_time def _inherit_option(self, name, val): """Return the inherited TransactionOption value.""" if val: return val txn_opts = self.options.default_transaction_options val = txn_opts and getattr(txn_opts, name) if val: return val return getattr(self.client, name) def with_transaction(self, callback, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): """Execute a callback in a transaction. This method starts a transaction on this session, executes ``callback`` once, and then commits the transaction. For example:: def callback(session): orders = session.client.db.orders inventory = session.client.db.inventory orders.insert_one({"sku": "abc123", "qty": 100}, session=session) inventory.update_one({"sku": "abc123", "qty": {"$gte": 100}}, {"$inc": {"qty": -100}}, session=session) with client.start_session() as session: session.with_transaction(callback) To pass arbitrary arguments to the ``callback``, wrap your callable with a ``lambda`` like this:: def callback(session, custom_arg, custom_kwarg=None): # Transaction operations... with client.start_session() as session: session.with_transaction( lambda s: callback(s, "custom_arg", custom_kwarg=1)) In the event of an exception, ``with_transaction`` may retry the commit or the entire transaction, therefore ``callback`` may be invoked multiple times by a single call to ``with_transaction``. Developers should be mindful of this possiblity when writing a ``callback`` that modifies application state or has any other side-effects. Note that even when the ``callback`` is invoked multiple times, ``with_transaction`` ensures that the transaction will be committed at-most-once on the server. The ``callback`` should not attempt to start new transactions, but should simply run operations meant to be contained within a transaction. The ``callback`` should also not commit the transaction; this is handled automatically by ``with_transaction``. If the ``callback`` does commit or abort the transaction without error, however, ``with_transaction`` will return without taking further action. When ``callback`` raises an exception, ``with_transaction`` automatically aborts the current transaction. When ``callback`` or :meth:`~ClientSession.commit_transaction` raises an exception that includes the ``"TransientTransactionError"`` error label, ``with_transaction`` starts a new transaction and re-executes the ``callback``. When :meth:`~ClientSession.commit_transaction` raises an exception with the ``"UnknownTransactionCommitResult"`` error label, ``with_transaction`` retries the commit until the result of the transaction is known. This method will cease retrying after 120 seconds has elapsed. This timeout is not configurable and any exception raised by the ``callback`` or by :meth:`ClientSession.commit_transaction` after the timeout is reached will be re-raised. Applications that desire a different timeout duration should not use this method. 
:Parameters: - `callback`: The callable ``callback`` to run inside a transaction. The callable must accept a single argument, this session. Note, under certain error conditions the callback may be run multiple times. - `read_concern` (optional): The :class:`~pymongo.read_concern.ReadConcern` to use for this transaction. - `write_concern` (optional): The :class:`~pymongo.write_concern.WriteConcern` to use for this transaction. - `read_preference` (optional): The read preference to use for this transaction. If ``None`` (the default) the :attr:`read_preference` of this :class:`Database` is used. See :mod:`~pymongo.read_preferences` for options. :Returns: The return value of the ``callback``. .. versionadded:: 3.9 """ start_time = monotonic.time() while True: self.start_transaction( read_concern, write_concern, read_preference, max_commit_time_ms) try: ret = callback(self) except Exception as exc: if self.in_transaction: self.abort_transaction() if (isinstance(exc, PyMongoError) and exc.has_error_label("TransientTransactionError") and _within_time_limit(start_time)): # Retry the entire transaction. continue raise if not self.in_transaction: # Assume callback intentionally ended the transaction. return ret while True: try: self.commit_transaction() except PyMongoError as exc: if (exc.has_error_label("UnknownTransactionCommitResult") and _within_time_limit(start_time) and not _max_time_expired_error(exc)): # Retry the commit. continue if (exc.has_error_label("TransientTransactionError") and _within_time_limit(start_time)): # Retry the entire transaction. break raise # Commit succeeded. return ret def start_transaction(self, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): """Start a multi-statement transaction. Takes the same arguments as :class:`TransactionOptions`. .. versionchanged:: 3.9 Added the ``max_commit_time_ms`` option. .. versionadded:: 3.7 """ self._check_ended() if self.in_transaction: raise InvalidOperation("Transaction already in progress") read_concern = self._inherit_option("read_concern", read_concern) write_concern = self._inherit_option("write_concern", write_concern) read_preference = self._inherit_option( "read_preference", read_preference) if max_commit_time_ms is None: opts = self.options.default_transaction_options if opts: max_commit_time_ms = opts.max_commit_time_ms self._transaction.opts = TransactionOptions( read_concern, write_concern, read_preference, max_commit_time_ms) self._transaction.reset() self._transaction.state = _TxnState.STARTING self._start_retryable_write() return _TransactionContext(self) def commit_transaction(self): """Commit a multi-statement transaction. .. versionadded:: 3.7 """ self._check_ended() retry = False state = self._transaction.state if state is _TxnState.NONE: raise InvalidOperation("No transaction started") elif state in (_TxnState.STARTING, _TxnState.COMMITTED_EMPTY): # Server transaction was never started, no need to send a command. self._transaction.state = _TxnState.COMMITTED_EMPTY return elif state is _TxnState.ABORTED: raise InvalidOperation( "Cannot call commitTransaction after calling abortTransaction") elif state is _TxnState.COMMITTED: # We're explicitly retrying the commit, move the state back to # "in progress" so that in_transaction returns true. 
self._transaction.state = _TxnState.IN_PROGRESS retry = True try: self._finish_transaction_with_retry("commitTransaction", retry) except ConnectionFailure as exc: # We do not know if the commit was successfully applied on the # server or if it satisfied the provided write concern, set the # unknown commit error label. exc._remove_error_label("TransientTransactionError") _reraise_with_unknown_commit(exc) except WTimeoutError as exc: # We do not know if the commit has satisfied the provided write # concern, add the unknown commit error label. _reraise_with_unknown_commit(exc) except OperationFailure as exc: if exc.code not in _UNKNOWN_COMMIT_ERROR_CODES: # The server reports errorLabels in the case. raise # We do not know if the commit was successfully applied on the # server or if it satisfied the provided write concern, set the # unknown commit error label. _reraise_with_unknown_commit(exc) finally: self._transaction.state = _TxnState.COMMITTED def abort_transaction(self): """Abort a multi-statement transaction. .. versionadded:: 3.7 """ self._check_ended() state = self._transaction.state if state is _TxnState.NONE: raise InvalidOperation("No transaction started") elif state is _TxnState.STARTING: # Server transaction was never started, no need to send a command. self._transaction.state = _TxnState.ABORTED return elif state is _TxnState.ABORTED: raise InvalidOperation("Cannot call abortTransaction twice") elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY): raise InvalidOperation( "Cannot call abortTransaction after calling commitTransaction") try: self._finish_transaction_with_retry("abortTransaction", False) except (OperationFailure, ConnectionFailure): # The transactions spec says to ignore abortTransaction errors. pass finally: self._transaction.state = _TxnState.ABORTED def _finish_transaction_with_retry(self, command_name, explict_retry): """Run commit or abort with one retry after any retryable error. :Parameters: - `command_name`: Either "commitTransaction" or "abortTransaction". - `explict_retry`: True when this is an explict commit retry attempt, ie the application called session.commit_transaction() twice. """ # This can be refactored with MongoClient._retry_with_session. try: return self._finish_transaction(command_name, explict_retry) except ServerSelectionTimeoutError: raise except ConnectionFailure as exc: try: return self._finish_transaction(command_name, True) except ServerSelectionTimeoutError: # Raise the original error so the application can infer that # an attempt was made. raise exc except OperationFailure as exc: if exc.code not in _RETRYABLE_ERROR_CODES: raise try: return self._finish_transaction(command_name, True) except ServerSelectionTimeoutError: # Raise the original error so the application can infer that # an attempt was made. raise exc def _finish_transaction(self, command_name, retrying): opts = self._transaction.opts wc = opts.write_concern cmd = SON([(command_name, 1)]) if command_name == "commitTransaction": if opts.max_commit_time_ms: cmd['maxTimeMS'] = opts.max_commit_time_ms # Transaction spec says that after the initial commit attempt, # subsequent commitTransaction commands should be upgraded to use # w:"majority" and set a default value of 10 seconds for wtimeout. 
if retrying: wc_doc = wc.document wc_doc["w"] = "majority" wc_doc.setdefault("wtimeout", 10000) wc = WriteConcern(**wc_doc) if self._transaction.recovery_token: cmd['recoveryToken'] = self._transaction.recovery_token with self._client._socket_for_writes(self) as sock_info: return self._client.admin._command( sock_info, cmd, session=self, write_concern=wc, parse_write_concern_error=True) def _advance_cluster_time(self, cluster_time): """Internal cluster time helper.""" if self._cluster_time is None: self._cluster_time = cluster_time elif cluster_time is not None: if cluster_time["clusterTime"] > self._cluster_time["clusterTime"]: self._cluster_time = cluster_time def advance_cluster_time(self, cluster_time): """Update the cluster time for this session. :Parameters: - `cluster_time`: The :data:`~pymongo.client_session.ClientSession.cluster_time` from another `ClientSession` instance. """ if not isinstance(cluster_time, abc.Mapping): raise TypeError( "cluster_time must be a subclass of collections.Mapping") if not isinstance(cluster_time.get("clusterTime"), Timestamp): raise ValueError("Invalid cluster_time") self._advance_cluster_time(cluster_time) def _advance_operation_time(self, operation_time): """Internal operation time helper.""" if self._operation_time is None: self._operation_time = operation_time elif operation_time is not None: if operation_time > self._operation_time: self._operation_time = operation_time def advance_operation_time(self, operation_time): """Update the operation time for this session. :Parameters: - `operation_time`: The :data:`~pymongo.client_session.ClientSession.operation_time` from another `ClientSession` instance. """ if not isinstance(operation_time, Timestamp): raise TypeError("operation_time must be an instance " "of bson.timestamp.Timestamp") self._advance_operation_time(operation_time) def _process_response(self, reply): """Process a response to a command that was run with this session.""" self._advance_cluster_time(reply.get('$clusterTime')) self._advance_operation_time(reply.get('operationTime')) if self.in_transaction and self._transaction.sharded: recovery_token = reply.get('recoveryToken') if recovery_token: self._transaction.recovery_token = recovery_token @property def has_ended(self): """True if this session is finished.""" return self._server_session is None @property def in_transaction(self): """True if this session has an active multi-statement transaction. .. 
versionadded:: 3.10 """ return self._transaction.active() @property def _pinned_address(self): """The mongos address this transaction was created on.""" if self._transaction.active(): return self._transaction.pinned_address return None def _pin_mongos(self, server): """Pin this session to the given mongos Server.""" self._transaction.sharded = True self._transaction.pinned_address = server.description.address def _unpin_mongos(self): """Unpin this session from any pinned mongos address.""" self._transaction.pinned_address = None def _txn_read_preference(self): """Return read preference of this transaction or None.""" if self.in_transaction: return self._transaction.opts.read_preference return None def _apply_to(self, command, is_retryable, read_preference): self._check_ended() self._server_session.last_use = monotonic.time() command['lsid'] = self._server_session.session_id if not self.in_transaction: self._transaction.reset() if is_retryable: command['txnNumber'] = self._server_session.transaction_id return if self.in_transaction: if read_preference != ReadPreference.PRIMARY: raise InvalidOperation( 'read preference in a transaction must be primary, not: ' '%r' % (read_preference,)) if self._transaction.state == _TxnState.STARTING: # First command begins a new transaction. self._transaction.state = _TxnState.IN_PROGRESS command['startTransaction'] = True if self._transaction.opts.read_concern: rc = self._transaction.opts.read_concern.document else: rc = {} if (self.options.causal_consistency and self.operation_time is not None): rc['afterClusterTime'] = self.operation_time if rc: command['readConcern'] = rc command['txnNumber'] = self._server_session.transaction_id command['autocommit'] = False def _start_retryable_write(self): self._check_ended() self._server_session.inc_transaction_id() class _ServerSession(object): def __init__(self, pool_id): # Ensure id is type 4, regardless of CodecOptions.uuid_representation. self.session_id = {'id': Binary(uuid.uuid4().bytes, 4)} self.last_use = monotonic.time() self._transaction_id = 0 self.dirty = False self.pool_id = pool_id def mark_dirty(self): """Mark this session as dirty. A server session is marked dirty when a command fails with a network error. Dirty sessions are later discarded from the server session pool. """ self.dirty = True def timed_out(self, session_timeout_minutes): idle_seconds = monotonic.time() - self.last_use # Timed out if we have less than a minute to live. return idle_seconds > (session_timeout_minutes - 1) * 60 @property def transaction_id(self): """Positive 64-bit integer.""" return Int64(self._transaction_id) def inc_transaction_id(self): self._transaction_id += 1 class _ServerSessionPool(collections.deque): """Pool of _ServerSession objects. This class is not thread-safe, access it while holding the Topology lock. """ def __init__(self, *args, **kwargs): super(_ServerSessionPool, self).__init__(*args, **kwargs) self.pool_id = 0 def reset(self): self.pool_id += 1 self.clear() def pop_all(self): ids = [] while self: ids.append(self.pop().session_id) return ids def get_server_session(self, session_timeout_minutes): # Although the Driver Sessions Spec says we only clear stale sessions # in return_server_session, PyMongo can't take a lock when returning # sessions from a __del__ method (like in Cursor.__die), so it can't # clear stale sessions there. In case many sessions were returned via # __del__, check for stale sessions here too. 
self._clear_stale(session_timeout_minutes) # The most recently used sessions are on the left. while self: s = self.popleft() if not s.timed_out(session_timeout_minutes): return s return _ServerSession(self.pool_id) def return_server_session(self, server_session, session_timeout_minutes): self._clear_stale(session_timeout_minutes) if not server_session.timed_out(session_timeout_minutes): self.return_server_session_no_lock(server_session) def return_server_session_no_lock(self, server_session): # Discard sessions from an old pool to avoid duplicate sessions in the # child process after a fork. if server_session.pool_id == self.pool_id and not server_session.dirty: self.appendleft(server_session) def _clear_stale(self, session_timeout_minutes): # Clear stale sessions. The least recently used are on the right. while self: if self[-1].timed_out(session_timeout_minutes): self.pop() else: # The remaining sessions also haven't timed out. break
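The comment in _finish_transaction() above notes that once commitTransaction is retried, the write concern is upgraded to w='majority' with a default 10-second wtimeout. A small isolated sketch of that upgrade step; it assumes pymongo is installed, and the helper name is illustrative:

from pymongo.write_concern import WriteConcern


def upgraded_commit_write_concern(wc):
    # Build the write concern used when retrying commitTransaction:
    # force majority acknowledgement and default wtimeout to 10 seconds.
    doc = wc.document                  # e.g. {} or {'w': 1}; .document returns a copy
    doc['w'] = 'majority'
    doc.setdefault('wtimeout', 10000)
    return WriteConcern(**doc)


# upgraded_commit_write_concern(WriteConcern(w=1))
# -> WriteConcern(w='majority', wtimeout=10000)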
38.926566
80
0.645065
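with_transaction() above retries transient errors and unknown commit results only inside a fixed 120-second window measured from the first attempt (_WITH_TRANSACTION_RETRY_TIME_LIMIT). A standalone sketch of that bound using the standard library's monotonic clock; the names here are illustrative:

import time

RETRY_TIME_LIMIT = 120  # seconds, mirroring _WITH_TRANSACTION_RETRY_TIME_LIMIT


def within_retry_limit(start_time):
    # True while the convenient-transaction retry window is still open.
    return time.monotonic() - start_time < RETRY_TIME_LIMIT


start = time.monotonic()
# A retry loop re-runs the callback/commit only while within_retry_limit(start)
# is True; once the window closes, the original error is re-raised.
print(within_retry_limit(start))  # True immediately after the first attempt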
import collections import os import sys import uuid from bson.binary import Binary from bson.int64 import Int64 from bson.py3compat import abc, integer_types, reraise_instance from bson.son import SON from bson.timestamp import Timestamp from pymongo import monotonic from pymongo.errors import (ConfigurationError, ConnectionFailure, InvalidOperation, OperationFailure, PyMongoError, ServerSelectionTimeoutError, WTimeoutError) from pymongo.helpers import _RETRYABLE_ERROR_CODES from pymongo.read_concern import ReadConcern from pymongo.read_preferences import ReadPreference, _ServerMode from pymongo.write_concern import WriteConcern class SessionOptions(object): def __init__(self, causal_consistency=True, default_transaction_options=None): self._causal_consistency = causal_consistency if default_transaction_options is not None: if not isinstance(default_transaction_options, TransactionOptions): raise TypeError( "default_transaction_options must be an instance of " "pymongo.client_session.TransactionOptions, not: %r" % (default_transaction_options,)) self._default_transaction_options = default_transaction_options @property def causal_consistency(self): return self._causal_consistency @property def default_transaction_options(self): return self._default_transaction_options class TransactionOptions(object): def __init__(self, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): self._read_concern = read_concern self._write_concern = write_concern self._read_preference = read_preference self._max_commit_time_ms = max_commit_time_ms if read_concern is not None: if not isinstance(read_concern, ReadConcern): raise TypeError("read_concern must be an instance of " "pymongo.read_concern.ReadConcern, not: %r" % (read_concern,)) if write_concern is not None: if not isinstance(write_concern, WriteConcern): raise TypeError("write_concern must be an instance of " "pymongo.write_concern.WriteConcern, not: %r" % (write_concern,)) if not write_concern.acknowledged: raise ConfigurationError( "transactions do not support unacknowledged write concern" ": %r" % (write_concern,)) if read_preference is not None: if not isinstance(read_preference, _ServerMode): raise TypeError("%r is not valid for read_preference. See " "pymongo.read_preferences for valid " "options." 
% (read_preference,)) if max_commit_time_ms is not None: if not isinstance(max_commit_time_ms, integer_types): raise TypeError( "max_commit_time_ms must be an integer or None") @property def read_concern(self): return self._read_concern @property def write_concern(self): return self._write_concern @property def read_preference(self): return self._read_preference @property def max_commit_time_ms(self): return self._max_commit_time_ms def _validate_session_write_concern(session, write_concern): if session: if write_concern is not None and not write_concern.acknowledged: if session._implicit: return None else: raise ConfigurationError( 'Explicit sessions are incompatible with ' 'unacknowledged write concern: %r' % ( write_concern,)) return session class _TransactionContext(object): def __init__(self, session): self.__session = session def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): if self.__session.in_transaction: if exc_val is None: self.__session.commit_transaction() else: self.__session.abort_transaction() class _TxnState(object): NONE = 1 STARTING = 2 IN_PROGRESS = 3 COMMITTED = 4 COMMITTED_EMPTY = 5 ABORTED = 6 class _Transaction(object): def __init__(self, opts): self.opts = opts self.state = _TxnState.NONE self.sharded = False self.pinned_address = None self.recovery_token = None def active(self): return self.state in (_TxnState.STARTING, _TxnState.IN_PROGRESS) def reset(self): self.state = _TxnState.NONE self.sharded = False self.pinned_address = None self.recovery_token = None def _reraise_with_unknown_commit(exc): exc._add_error_label("UnknownTransactionCommitResult") reraise_instance(exc, trace=sys.exc_info()[2]) def _max_time_expired_error(exc): return isinstance(exc, OperationFailure) and exc.code == 50 _UNKNOWN_COMMIT_ERROR_CODES = _RETRYABLE_ERROR_CODES | frozenset([ 64, 50, ]) _WITH_TRANSACTION_RETRY_TIME_LIMIT = 120 def _within_time_limit(start_time): return monotonic.time() - start_time < _WITH_TRANSACTION_RETRY_TIME_LIMIT class ClientSession(object): def __init__(self, client, server_session, options, authset, implicit): # A MongoClient, a _ServerSession, a SessionOptions, and a set. self._client = client self._server_session = server_session self._options = options self._authset = authset self._cluster_time = None self._operation_time = None # Is this an implicitly created session? 
self._implicit = implicit self._transaction = _Transaction(None) def end_session(self): self._end_session(lock=True) def _end_session(self, lock): if self._server_session is not None: try: if self.in_transaction: self.abort_transaction() finally: self._client._return_server_session(self._server_session, lock) self._server_session = None def _check_ended(self): if self._server_session is None: raise InvalidOperation("Cannot use ended session") def __enter__(self): return self def __exit__(self, exc_type, exc_val, exc_tb): self._end_session(lock=True) @property def client(self): return self._client @property def options(self): return self._options @property def session_id(self): self._check_ended() return self._server_session.session_id @property def cluster_time(self): return self._cluster_time @property def operation_time(self): return self._operation_time def _inherit_option(self, name, val): if val: return val txn_opts = self.options.default_transaction_options val = txn_opts and getattr(txn_opts, name) if val: return val return getattr(self.client, name) def with_transaction(self, callback, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): start_time = monotonic.time() while True: self.start_transaction( read_concern, write_concern, read_preference, max_commit_time_ms) try: ret = callback(self) except Exception as exc: if self.in_transaction: self.abort_transaction() if (isinstance(exc, PyMongoError) and exc.has_error_label("TransientTransactionError") and _within_time_limit(start_time)): # Retry the entire transaction. continue raise if not self.in_transaction: # Assume callback intentionally ended the transaction. return ret while True: try: self.commit_transaction() except PyMongoError as exc: if (exc.has_error_label("UnknownTransactionCommitResult") and _within_time_limit(start_time) and not _max_time_expired_error(exc)): # Retry the commit. continue if (exc.has_error_label("TransientTransactionError") and _within_time_limit(start_time)): # Retry the entire transaction. break raise # Commit succeeded. return ret def start_transaction(self, read_concern=None, write_concern=None, read_preference=None, max_commit_time_ms=None): self._check_ended() if self.in_transaction: raise InvalidOperation("Transaction already in progress") read_concern = self._inherit_option("read_concern", read_concern) write_concern = self._inherit_option("write_concern", write_concern) read_preference = self._inherit_option( "read_preference", read_preference) if max_commit_time_ms is None: opts = self.options.default_transaction_options if opts: max_commit_time_ms = opts.max_commit_time_ms self._transaction.opts = TransactionOptions( read_concern, write_concern, read_preference, max_commit_time_ms) self._transaction.reset() self._transaction.state = _TxnState.STARTING self._start_retryable_write() return _TransactionContext(self) def commit_transaction(self): self._check_ended() retry = False state = self._transaction.state if state is _TxnState.NONE: raise InvalidOperation("No transaction started") elif state in (_TxnState.STARTING, _TxnState.COMMITTED_EMPTY): # Server transaction was never started, no need to send a command. 
self._transaction.state = _TxnState.COMMITTED_EMPTY return elif state is _TxnState.ABORTED: raise InvalidOperation( "Cannot call commitTransaction after calling abortTransaction") elif state is _TxnState.COMMITTED: # We're explicitly retrying the commit, move the state back to self._transaction.state = _TxnState.IN_PROGRESS retry = True try: self._finish_transaction_with_retry("commitTransaction", retry) except ConnectionFailure as exc: exc._remove_error_label("TransientTransactionError") _reraise_with_unknown_commit(exc) except WTimeoutError as exc: _reraise_with_unknown_commit(exc) except OperationFailure as exc: if exc.code not in _UNKNOWN_COMMIT_ERROR_CODES: raise _reraise_with_unknown_commit(exc) finally: self._transaction.state = _TxnState.COMMITTED def abort_transaction(self): self._check_ended() state = self._transaction.state if state is _TxnState.NONE: raise InvalidOperation("No transaction started") elif state is _TxnState.STARTING: self._transaction.state = _TxnState.ABORTED return elif state is _TxnState.ABORTED: raise InvalidOperation("Cannot call abortTransaction twice") elif state in (_TxnState.COMMITTED, _TxnState.COMMITTED_EMPTY): raise InvalidOperation( "Cannot call abortTransaction after calling commitTransaction") try: self._finish_transaction_with_retry("abortTransaction", False) except (OperationFailure, ConnectionFailure): pass finally: self._transaction.state = _TxnState.ABORTED def _finish_transaction_with_retry(self, command_name, explict_retry): try: return self._finish_transaction(command_name, explict_retry) except ServerSelectionTimeoutError: raise except ConnectionFailure as exc: try: return self._finish_transaction(command_name, True) except ServerSelectionTimeoutError: raise exc except OperationFailure as exc: if exc.code not in _RETRYABLE_ERROR_CODES: raise try: return self._finish_transaction(command_name, True) except ServerSelectionTimeoutError: raise exc def _finish_transaction(self, command_name, retrying): opts = self._transaction.opts wc = opts.write_concern cmd = SON([(command_name, 1)]) if command_name == "commitTransaction": if opts.max_commit_time_ms: cmd['maxTimeMS'] = opts.max_commit_time_ms if retrying: wc_doc = wc.document wc_doc["w"] = "majority" wc_doc.setdefault("wtimeout", 10000) wc = WriteConcern(**wc_doc) if self._transaction.recovery_token: cmd['recoveryToken'] = self._transaction.recovery_token with self._client._socket_for_writes(self) as sock_info: return self._client.admin._command( sock_info, cmd, session=self, write_concern=wc, parse_write_concern_error=True) def _advance_cluster_time(self, cluster_time): if self._cluster_time is None: self._cluster_time = cluster_time elif cluster_time is not None: if cluster_time["clusterTime"] > self._cluster_time["clusterTime"]: self._cluster_time = cluster_time def advance_cluster_time(self, cluster_time): if not isinstance(cluster_time, abc.Mapping): raise TypeError( "cluster_time must be a subclass of collections.Mapping") if not isinstance(cluster_time.get("clusterTime"), Timestamp): raise ValueError("Invalid cluster_time") self._advance_cluster_time(cluster_time) def _advance_operation_time(self, operation_time): if self._operation_time is None: self._operation_time = operation_time elif operation_time is not None: if operation_time > self._operation_time: self._operation_time = operation_time def advance_operation_time(self, operation_time): if not isinstance(operation_time, Timestamp): raise TypeError("operation_time must be an instance " "of bson.timestamp.Timestamp") 
self._advance_operation_time(operation_time) def _process_response(self, reply): self._advance_cluster_time(reply.get('$clusterTime')) self._advance_operation_time(reply.get('operationTime')) if self.in_transaction and self._transaction.sharded: recovery_token = reply.get('recoveryToken') if recovery_token: self._transaction.recovery_token = recovery_token @property def has_ended(self): return self._server_session is None @property def in_transaction(self): return self._transaction.active() @property def _pinned_address(self): if self._transaction.active(): return self._transaction.pinned_address return None def _pin_mongos(self, server): self._transaction.sharded = True self._transaction.pinned_address = server.description.address def _unpin_mongos(self): self._transaction.pinned_address = None def _txn_read_preference(self): if self.in_transaction: return self._transaction.opts.read_preference return None def _apply_to(self, command, is_retryable, read_preference): self._check_ended() self._server_session.last_use = monotonic.time() command['lsid'] = self._server_session.session_id if not self.in_transaction: self._transaction.reset() if is_retryable: command['txnNumber'] = self._server_session.transaction_id return if self.in_transaction: if read_preference != ReadPreference.PRIMARY: raise InvalidOperation( 'read preference in a transaction must be primary, not: ' '%r' % (read_preference,)) if self._transaction.state == _TxnState.STARTING: self._transaction.state = _TxnState.IN_PROGRESS command['startTransaction'] = True if self._transaction.opts.read_concern: rc = self._transaction.opts.read_concern.document else: rc = {} if (self.options.causal_consistency and self.operation_time is not None): rc['afterClusterTime'] = self.operation_time if rc: command['readConcern'] = rc command['txnNumber'] = self._server_session.transaction_id command['autocommit'] = False def _start_retryable_write(self): self._check_ended() self._server_session.inc_transaction_id() class _ServerSession(object): def __init__(self, pool_id): self.session_id = {'id': Binary(uuid.uuid4().bytes, 4)} self.last_use = monotonic.time() self._transaction_id = 0 self.dirty = False self.pool_id = pool_id def mark_dirty(self): self.dirty = True def timed_out(self, session_timeout_minutes): idle_seconds = monotonic.time() - self.last_use return idle_seconds > (session_timeout_minutes - 1) * 60 @property def transaction_id(self): return Int64(self._transaction_id) def inc_transaction_id(self): self._transaction_id += 1 class _ServerSessionPool(collections.deque): def __init__(self, *args, **kwargs): super(_ServerSessionPool, self).__init__(*args, **kwargs) self.pool_id = 0 def reset(self): self.pool_id += 1 self.clear() def pop_all(self): ids = [] while self: ids.append(self.pop().session_id) return ids def get_server_session(self, session_timeout_minutes): # sessions from a __del__ method (like in Cursor.__die), so it can't self._clear_stale(session_timeout_minutes) while self: s = self.popleft() if not s.timed_out(session_timeout_minutes): return s return _ServerSession(self.pool_id) def return_server_session(self, server_session, session_timeout_minutes): self._clear_stale(session_timeout_minutes) if not server_session.timed_out(session_timeout_minutes): self.return_server_session_no_lock(server_session) def return_server_session_no_lock(self, server_session): if server_session.pool_id == self.pool_id and not server_session.dirty: self.appendleft(server_session) def _clear_stale(self, session_timeout_minutes): while self: if 
self[-1].timed_out(session_timeout_minutes): self.pop() else: break
true
true
f70145c5b8f64068fa4b5ed2fd23a6953ff95a4f
136
py
Python
ARC027/ARC027f.py
VolgaKurvar/AtCoder
21acb489f1594bbb1cdc64fbf8421d876b5b476d
[ "Unlicense" ]
null
null
null
ARC027/ARC027f.py
VolgaKurvar/AtCoder
21acb489f1594bbb1cdc64fbf8421d876b5b476d
[ "Unlicense" ]
null
null
null
ARC027/ARC027f.py
VolgaKurvar/AtCoder
21acb489f1594bbb1cdc64fbf8421d876b5b476d
[ "Unlicense" ]
null
null
null
#ARC027f
def main():
    import sys
    input=sys.stdin.readline
    sys.setrecursionlimit(10**6)


if __name__ == '__main__':
    main()
17
32
0.654412
def main():
    import sys
    input=sys.stdin.readline
    sys.setrecursionlimit(10**6)


if __name__ == '__main__':
    main()
true
true
f70147505fc19c403669681d5e9f657e0d325321
877
py
Python
src/helper/parseCorpus.py
BookGin/ntuir
fc88e955b26fc0279e94c8998c4d624e6be67661
[ "MIT" ]
null
null
null
src/helper/parseCorpus.py
BookGin/ntuir
fc88e955b26fc0279e94c8998c4d624e6be67661
[ "MIT" ]
null
null
null
src/helper/parseCorpus.py
BookGin/ntuir
fc88e955b26fc0279e94c8998c4d624e6be67661
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import sys

import bigram

# usage: ./parseJson.py "corpus.json path" "output directory"

def concatString(path):
    corpus = ""
    with open(path, 'r', encoding='UTF-8') as f:
        for line in f.readlines():
            corpus += line
    return corpus

corpus = concatString(sys.argv[1])
data = json.loads(corpus)
output_path = sys.argv[2]

topics = {}
for doc in data:
    string = bigram.get_words(doc["body"])
    topic_num = doc["topic"]
    if topic_num not in topics:
        topics[topic_num] = []
    topics[topic_num].append(string + "\n")

print("Finish traversing corpus.json")

for topic_index in topics.keys():
    path = "%s/%d.txt" % (output_path, topic_index)
    with open(path, 'w', encoding='UTF-8') as f:
        f.writelines(topics[topic_index])

print("Generated %d files." % len(topics))
23.702703
61
0.639681
import json
import sys

import bigram


def concatString(path):
    corpus = ""
    with open(path, 'r', encoding='UTF-8') as f:
        for line in f.readlines():
            corpus += line
    return corpus

corpus = concatString(sys.argv[1])
data = json.loads(corpus)
output_path = sys.argv[2]

topics = {}
for doc in data:
    string = bigram.get_words(doc["body"])
    topic_num = doc["topic"]
    if topic_num not in topics:
        topics[topic_num] = []
    topics[topic_num].append(string + "\n")

print("Finish traversing corpus.json")

for topic_index in topics.keys():
    path = "%s/%d.txt" % (output_path, topic_index)
    with open(path, 'w', encoding='UTF-8') as f:
        f.writelines(topics[topic_index])

print("Generated %d files." % len(topics))
true
true