# file: d786e89b9d478dcff3c541c89731247075d078c3 (Python)
'''
@author: Ken Venner
@contact: [email protected]
@version: 1.13
Read in a file of wine names and create consistent wine descriptions
from these names.
'''
import kvutil
import kvcsv
import re
import sys
import shutil
# may be commented out in the future
import pprint
pp = pprint.PrettyPrinter(indent=4)
ppFlag = False
# application variables
optiondictconfig = {
'AppVersion' : {
'value' : '1.13',
'description' : 'defines the version number for the app',
},
'debug' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we are running in debug mode',
},
'verbose' : {
'value' : 1,
'type' : 'int',
'description' : 'defines the display level for print messages',
},
'setup_check' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we are checking the setup',
},
'pprint' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we output with pretty print when debugging',
},
'csvfile_master_in' : {
'value' : 'wine_xref.csv',
'description' : 'defines the name of the master data input file',
},
'csvfile_update_in' : {
'value' : 'wineref.csv',
'description' : 'defines the name of the input file to be updated',
},
'csvfile_update_out' : {
'value' : 'wineref2.csv',
'description' : 'defines the name of the updated output file',
},
'fldWine' : {
'value' : 'wine',
'description' : 'defines the name of the field that holds the Wine ',
},
'fldWineDescr' : {
'value' : 'winedescr',
'description' : 'defines the name of the field holding the wine description',
},
'fldWineDescrNew' : {
'value' : 'winedescrnew',
'description' : 'defines the name of the NEW field holding the new description ',
},
'fldWineDescrMatch' : {
'value' : None,
'description' : 'defines the name of the NEW field holding the results of comparing the existing description to the new description',
},
'fldWineMaster' : {
'value' : None,
'description' : 'defines the name of the field that holds the Wine when reading the master file ',
},
'fldWineDescrMaster' : {
'value' : None,
'description' : 'defines the name of the field holding the wine description when reading the master file',
},
'backupfile_ext' : {
'value' : '.bak',
'description' : 'defines the extension used for the backup copy of the update input file when we replace it with the output',
},
'defaultnew' : {
'value' : None,
'description' : 'defines a default value to set fldWineDescrNew to when it is not otherwise set',
},
}
### GLOBAL VARIABLES / LOOKUPS ########################################
# regex search for vintage in wine name
vintageLookup = (
re.compile(r'\d\d\d\d\s+\d\d(\d\d)'), # two years together - get this one over early
re.compile(r'^\d\d(\d\d)'), # four position start of line
re.compile(r'\s\d\d(\d\d)$'), # four position end of line
re.compile(r'\s\d\d(\d\d)\s'), # four position middle of line
re.compile(r'XX\d\d(\d\d)\s'), # four position preceded by XX
re.compile(r'\s\d\d(\d\d)\/'), # four position split
re.compile(r'\s\'?(\d\d)\'?$|\s\'?(\d\d)\'?\s'), # two position date with optional apostrophe front or back
)
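# a minimal sketch of how these vintage patterns are typically applied; the
# actual driver is not shown in this truncated file, and findVintage is an
# illustrative name, not from the original source
def findVintage(rec, fldWine='wine'):
    # scan patterns in priority order; return the first captured year
    for pattern in vintageLookup:
        match = pattern.search(rec[fldWine])
        if match:
            # the two-digit pattern has two alternative groups; take the one that hit
            for grp in match.groups():
                if grp:
                    return grp
    return None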
# regex search for case in wine name
reCase = re.compile(r'12\s*X\s*750\s*ML|\bcase\b|12\/750\s*ML',re.IGNORECASE)
# regex to pick up qualifiers from the wine
reQualLookup = (
(None, re.compile(r'\bWithout\s+Gift\b|\bNo\s+Gift', re.IGNORECASE)), # the non-gift patterns - do them first
('Gift', re.compile(r'\bGift\b', re.IGNORECASE)),
('VAP', re.compile(r'\bVAP\b', re.IGNORECASE)),
('VAP', re.compile(r'\bGlassVAP\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlass\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlasses\b', re.IGNORECASE)),
('Etch', re.compile(r'\bEtch\b', re.IGNORECASE)),
('Basket', re.compile(r'\bBasket\b', re.IGNORECASE)),
)
# regex search to define the size of the wine bottle
sizeLookup = (
('1.75L', re.compile(r'\b1\.75\s*Li?|\b1\.75$', re.IGNORECASE)),
('1.5L', re.compile(r'\b1\.5\s*L?\b|\bMagnum\b', re.IGNORECASE)),
('375mL', re.compile(r'Half\s+Bottle|375ml', re.IGNORECASE)),
('200mL', re.compile(r'\b200\s*ML|\(200\s*ML', re.IGNORECASE)),
('50mL', re.compile(r'\b50\s*ML|\(50\s*ML', re.IGNORECASE)),
('500mL', re.compile(r'\b500\s*ML|\(500\s*ML', re.IGNORECASE)),
('3L', re.compile(r'\b3\s*Li?', re.IGNORECASE)),
('6L', re.compile(r'\b6\s*Li?', re.IGNORECASE)),
('9L', re.compile(r'\b9\s*Li?', re.IGNORECASE)),
('1L', re.compile(r'\b1L\b|\b1\s+L$|\b1\.0\s*L\b|\b1\s+Liter\b|\bOne\s+Liter\b|\bLITER\b|\b1\s*LTR', re.IGNORECASE)),
)
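# a minimal sketch of the first-match scan these (value, regex) tables support
# (illustrative only - the consuming code is not part of this truncated file):
def firstMatch(lookup, text):
    # return the canonical value of the first matching pattern, else None
    for value, pattern in lookup:
        if pattern.search(text):
            return value
    return None
# e.g. firstMatch(sizeLookup, 'Caymus Cab 2014 1.5L') returns '1.5L' and
# firstMatch(grapeLookup, 'Caymus Cab 2014 1.5L') returns 'Cab'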
# regex extract winery names from the wine field
wineryLookup = (
('Alban', re.compile(r'\bAlban\b', re.IGNORECASE)),
('Arrowood', re.compile(r'\bArrowood\b', re.IGNORECASE)),
('Atalon', re.compile(r'\bAtalon\b', re.IGNORECASE)),
('Attune', re.compile(r'\bAttune\b', re.IGNORECASE)),
('Auteur', re.compile(r'\bAuteur\b', re.IGNORECASE)),
('Austin Hope', re.compile(r'\bAustin\s+Hope\b', re.IGNORECASE)),
('Badge', re.compile(r'\bBadge\b', re.IGNORECASE)),
('Balletto', re.compile(r'\bBalletto\b', re.IGNORECASE)),
('Bell', re.compile(r'\bBell\s+Cellar', re.IGNORECASE)),
('BR Cohn', re.compile(r'\bB\.?\s?R\.?\s+Cohn\b', re.IGNORECASE)),
('Bremer', re.compile(r'\bBremer\b', re.IGNORECASE)),
('Brewer-Clifton', re.compile(r'\bBrewer[\s\-]Clifton\b', re.IGNORECASE)),
('BV', re.compile(r'\bBeaulieu\s+V|\bBV\b', re.IGNORECASE)),
('Belle Glos', re.compile(r'\bBelle\s+Glos\b', re.IGNORECASE)),
('Bennett Ln', re.compile(r'\bBennet+\sLane\b', re.IGNORECASE)),
('Benovia', re.compile(r'\bBenovia\b', re.IGNORECASE)),
('Beringer', re.compile(r'\bBeringer\b', re.IGNORECASE)),
('Blackstone', re.compile(r'\bBlackstone\b', re.IGNORECASE)),
('Brancott', re.compile(r'\bBrancott\b', re.IGNORECASE)),
('Cade', re.compile(r'\bCade\b', re.IGNORECASE)),
('Cain Five', re.compile(r'\bCain\s+Five\b|\bCain\s-\sFive\b|\bCain\s5\b|\bCainFive\b', re.IGNORECASE)),
('Cakebread', re.compile(r'\bCakebread\b', re.IGNORECASE)),
('Cardinale', re.compile(r'\bCardinale\b', re.IGNORECASE)),
('Caymus', re.compile(r'\bCaymus\b', re.IGNORECASE)),
('Chappellet', re.compile(r'\bChappellet\b', re.IGNORECASE)),
('Chalk Hill', re.compile(r'\bChalk\s+Hill\b', re.IGNORECASE)),
('Clos Du Bois', re.compile(r'\bClos\s+Du\s+Bois\b', re.IGNORECASE)),
('ClosDuVal', re.compile(r'\bClos\s+du\s+Val\b', re.IGNORECASE)),
('Colgin', re.compile(r'\bColgin\b', re.IGNORECASE)),
('Concha Don Melchor', re.compile(r'\bConcha\s.*Don\s+Melchor\b|Don\s+Melchor\b', re.IGNORECASE)),
('Continuum', re.compile(r'\bContinuum\b', re.IGNORECASE)),
('Corison', re.compile(r'\bCorison\b', re.IGNORECASE)),
('Cristal', re.compile(r'Roederer\s?.*Cristal\b|\bCristal\b.+Brut', re.IGNORECASE)),
('Curran', re.compile(r'\bCurran\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bDarioush\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bCaravan\b', re.IGNORECASE)),
('David Arthur', re.compile(r'\bDavid\s+Arthur\b', re.IGNORECASE)),
('David Bruce', re.compile(r'\bDavid\s+Bruce\b', re.IGNORECASE)),
('Davis Family', re.compile(r'\bDavis\s+Family\b', re.IGNORECASE)),
('Del Dotto', re.compile(r'\bDel\s+Dotto\b', re.IGNORECASE)),
('Dominus', re.compile(r'\bDominus\b', re.IGNORECASE)),
('Goldeneye', re.compile(r'\bGoldeneye\b', re.IGNORECASE)), # before duckhorn
('Paraduxx', re.compile(r'\bParaduxx\b', re.IGNORECASE)), # before duckhorn
('Domaine Carneros', re.compile(r'\bDomaine\s+Carneros\b', re.IGNORECASE)),
('Drappier', re.compile(r'\bDrappier\b', re.IGNORECASE)),
('Duckhorn', re.compile(r'\bDuckhorn\b', re.IGNORECASE)),
('Dumol', re.compile(r'\bDumol\b', re.IGNORECASE)),
('Dunn', re.compile(r'\bDunn\b', re.IGNORECASE)),
('Ehlers', re.compile(r'\bEhlers\b', re.IGNORECASE)),
('Etude', re.compile(r'\bEtude\b', re.IGNORECASE)),
('Far Niente', re.compile(r'\bFar Niente\b', re.IGNORECASE)),
('Flora', re.compile(r'\bFlora\s+Springs\b', re.IGNORECASE)),
('Flowers', re.compile(r'\bFlowers\b', re.IGNORECASE)),
('Robert Foley', re.compile(r'\bRobert\s+Foley\b', re.IGNORECASE)), #before Foley
('Foley', re.compile(r'\bFoley\b', re.IGNORECASE)),
('Foxen', re.compile(r'\bFoxen\b', re.IGNORECASE)),
('Franciscan', re.compile(r'\bFranciscan\b', re.IGNORECASE)),
('Frank Family', re.compile(r'\bFrank Family\b', re.IGNORECASE)),
('Gary Farrell', re.compile(r'\bGary\s+Farrel+\b', re.IGNORECASE)),
('Ghost Block', re.compile(r'\bGhost\s+Block\b', re.IGNORECASE)),
('Grgich', re.compile(r'\bGrgich\b', re.IGNORECASE)),
('Groth', re.compile(r'\bGroth\b', re.IGNORECASE)),
('Gundlach', re.compile(r'\bGundlach\b', re.IGNORECASE)),
('Hansel', re.compile(r'\bHansel\b', re.IGNORECASE)),
('Hanzell', re.compile(r'\bHanzell\b', re.IGNORECASE)),
('Hess', re.compile(r'\bHess\b', re.IGNORECASE)),
('Hewitt', re.compile(r'\bHewitt\b', re.IGNORECASE)),
('Hobbs', re.compile(r'\bHobbs\b|\bcrossbarn\b', re.IGNORECASE)),
('Hundred Acre', re.compile(r'\bHundred\s+Acre\b', re.IGNORECASE)),
('Jordan', re.compile(r'\bJordan\b', re.IGNORECASE)),
('Justin', re.compile(r'\bJustin\b', re.IGNORECASE)),
('Kim Crawford', re.compile(r'\bKim\s+Crawford\b', re.IGNORECASE)),
('Kistler', re.compile(r'\bKistler\b', re.IGNORECASE)),
('Kosta', re.compile(r'\bKosta\s+Browne?\b', re.IGNORECASE)),
('Krug', re.compile(r'\bKrug\b', re.IGNORECASE)),
('Kunde', re.compile(r'\bKunde\b', re.IGNORECASE)),
('LaCrema', re.compile(r'\bLa\s?Crema\b', re.IGNORECASE)),
('Lewis', re.compile(r'\bLewis\b', re.IGNORECASE)),
('Lokoya', re.compile(r'\bLokoya\b', re.IGNORECASE)),
('Meiomi', re.compile(r'\bMeiomi\b', re.IGNORECASE)),
('Melville', re.compile(r'\bMelville\b', re.IGNORECASE)),
('Momento Mori', re.compile(r'\bMomento\s+Mori\b', re.IGNORECASE)),
('Mondavi', re.compile(r'\bMondavi\b', re.IGNORECASE)),
('Montelena', re.compile(r'\bMontelena\b', re.IGNORECASE)),
('Mt Veeder', re.compile(r'^Mount\s+Veeder\b|^Mt\.? Veeder\b|\d+\s+M[^t]*t\s+Veeder\b', re.IGNORECASE)),
('Newton', re.compile(r'\bNewton\b', re.IGNORECASE)),
('Nickel', re.compile(r'\bNickel\b', re.IGNORECASE)),
('Opus One', re.compile(r'\bOpus\s+One\b', re.IGNORECASE)),
('P Togni', re.compile(r'\bTogni\b', re.IGNORECASE)),
('Pahlmeyer Jayson', re.compile(r'\bJayson\b', re.IGNORECASE)), # this before pahlmeyer
('Pahlmeyer', re.compile(r'\bPahlmeyer\b(?!\s*Jay)', re.IGNORECASE)),
('Papillon', re.compile(r'\bPapillon\b', re.IGNORECASE)),
('Patz', re.compile(r'\bPatz\b', re.IGNORECASE)),
('Phelps', re.compile(r'\bPhelps\b', re.IGNORECASE)),
('Plumpjack', re.compile(r'\bPlumpjack\b', re.IGNORECASE)),
('Pride', re.compile(r'\bPride\b', re.IGNORECASE)),
('Prisoner', re.compile(r'\bPrisoner\b', re.IGNORECASE)),
('Provenance', re.compile(r'\bProvenance\b', re.IGNORECASE)),
('R Sinskey', re.compile(r'\bSinskey\b', re.IGNORECASE)),
('Ramey', re.compile(r'\bRamey\b', re.IGNORECASE)),
('Revana', re.compile(r'\bRevana\b', re.IGNORECASE)),
('Raptor', re.compile(r'\bRaptor\s+Ridge\b', re.IGNORECASE)),
('Ridge', re.compile(r'\bRidge\b', re.IGNORECASE)),
('Rombauer', re.compile(r'\bRombauer\b', re.IGNORECASE)),
('Rudd', re.compile(r'\bRudd\b', re.IGNORECASE)),
('Scarecrow', re.compile(r'\bScarecrow\b', re.IGNORECASE)),
('Sea Smoke', re.compile(r'\bSea\s+Smoke\b', re.IGNORECASE)),
('Seghesio', re.compile(r'\bSeghesio\b', re.IGNORECASE)),
('Shafer', re.compile(r'\bShafer\b', re.IGNORECASE)),
('Sherwin', re.compile(r'\bSherwin\b', re.IGNORECASE)),
('Silver Oak', re.compile(r'\bSilver\s+Oak\b', re.IGNORECASE)),
('Silverado', re.compile(r'\bSilverado\b', re.IGNORECASE)),
('Simi', re.compile(r'\bSimi\b', re.IGNORECASE)),
('Sonoma Cutrer', re.compile(r'\bCutrer\b', re.IGNORECASE)),
('Spottswoode', re.compile(r'\bSpottswoode\b', re.IGNORECASE)),
('Stag Leap', re.compile(r'\bStag.*\sLeap\b', re.IGNORECASE)),
('Sullivan', re.compile(r'\bSullivan\b', re.IGNORECASE)),
('Summerland', re.compile(r'\bSummerland\b', re.IGNORECASE)),
('Summers', re.compile(r'\bSummers\b', re.IGNORECASE)),
('Tantara', re.compile(r'\bTantara\b', re.IGNORECASE)),
('Turnbull', re.compile(r'\bTurnbull\b', re.IGNORECASE)),
('Veuve', re.compile(r'\bVeuve\b', re.IGNORECASE)),
('Viader', re.compile(r'\bViader\b', re.IGNORECASE)),
('Waterstone', re.compile(r'\bWaterstone\b', re.IGNORECASE)),
('Whitehall', re.compile(r'\bWhitehall\b', re.IGNORECASE)),
('Wm Selyem', re.compile(r'\bWilliams\s*\-?Selyem\b', re.IGNORECASE)),
('ZD', re.compile(r'\bZD\b', re.IGNORECASE)),
('Zaca', re.compile(r'\bZaca\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Reserve\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Rsv\b', re.IGNORECASE)),
('zCognac Courvoisier', re.compile(r'\bCourvoisier\b', re.IGNORECASE)),
('zCognac Hennessy', re.compile(r'\bHennesse?y\b', re.IGNORECASE)),
('zCognac Remy', re.compile(r'\bRemy\s+Martin\b|\bRemy\s+Louis', re.IGNORECASE)),
('zCointreau', re.compile(r'\bCointreau\b', re.IGNORECASE)),
('zGin Hendrick', re.compile(r'\bHendrick', re.IGNORECASE)),
('zGin Tanqueray', re.compile(r'\bTanqueray\b', re.IGNORECASE)),
('zRum Mt Gay', re.compile(r'\bMount\s+Gay\b|\bMt\s+Gay', re.IGNORECASE)),
('zRum Ron Zacapa', re.compile(r'\bRon\s+Zacapa\b', re.IGNORECASE)),
('zRye Hayden', re.compile(r'\bBasil\s+Hayden\b', re.IGNORECASE)),
('zSambuca', re.compile(r'\bSambuca\b', re.IGNORECASE)),
('zScotch Glenmorangie', re.compile(r'\bGlenmorangie\b', re.IGNORECASE)),
('zScotch Hibiki Harmony', re.compile(r'\bHibiki\s.*Harmony\b', re.IGNORECASE)),
('zScotch Hibiki', re.compile(r'\bHibiki\b(?!\s*Har)', re.IGNORECASE)),
('zScotch Macallan', re.compile(r'\bMacallan\b', re.IGNORECASE)),
('zTeq Campo Azul', re.compile(r'\bCampo\s+Azul\b', re.IGNORECASE)),
('zTeq Casamigos', re.compile(r'\bCasamigos\b', re.IGNORECASE)),
('zTeq Casino Azul', re.compile(r'\bCasino\s+Azul\b', re.IGNORECASE)),
('zTeq Clase Azul', re.compile(r'\bClase\s+Azul\b', re.IGNORECASE)),
('zTeq Cuervo', re.compile(r'\bJose\s+Cuervo\b|^Cuervo\b', re.IGNORECASE)),
('zTeq Don Julio', re.compile(r'\bDon\s+Julio\b', re.IGNORECASE)),
('zTeq Dos Artes', re.compile(r'\bDos\s+Artes\b', re.IGNORECASE)),
('zTeq Gran Cava', re.compile(r'\bGran\s+Cava\b', re.IGNORECASE)),
('zTeq Herradura', re.compile(r'\bHerradura\b', re.IGNORECASE)),
('zTeq Loma Azul', re.compile(r'\bLoma\s+Azul\b', re.IGNORECASE)),
('zTeq Padre Azul', re.compile(r'\bPadre\s+Azul\b', re.IGNORECASE)),
('zTeq Partida', re.compile(r'\bPartida\b', re.IGNORECASE)),
('zTeq Patron', re.compile(r'\bPatron\b', re.IGNORECASE)),
('zTripleSec Gr Marnier', re.compile(r'\bGrand\s+Marnier\b', re.IGNORECASE)),
('zTripleSec Dekuyper', re.compile(r'\bDekuyper\b', re.IGNORECASE)),
('zTripleSec Hiram', re.compile(r'\bHiram\b', re.IGNORECASE)),
('zVodka Absolut', re.compile(r'\bAbsolut\b', re.IGNORECASE)),
('zVodka Skyy', re.compile(r'\bSkyy\b', re.IGNORECASE)),
('zVodka Tito', re.compile(r'\bTito', re.IGNORECASE)),
('zWhiskey Balvenie', re.compile(r'\bBalvenie\b', re.IGNORECASE)),
('zWhiskey J Walker', re.compile(r'\bJohn+ie\s+Walker\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
)
# regex extract the grape from the wine fld
grapeLookup = (
('Cab Franc', re.compile(r'\bCabernet\s+Franc|\bCab\s+Franc', re.IGNORECASE)), # before cab
('Cab', re.compile(r'\bCabernet\b|\sCS\s|\sCS$|\bCab\b', re.IGNORECASE)),
('Claret', re.compile(r'\bClaret\b', re.IGNORECASE)),
('Rose Pinot', re.compile(r'\bRose\b.*\bPinot\b|\bPinot\b.*\bRose\b', re.IGNORECASE)),
('Pinot', re.compile(r'\bPinot\b|\bPN\b|\bP\s+Noir\b', re.IGNORECASE)),
('Merlot', re.compile(r'\bMerlot\b|\bME\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\s+Blanc\b|\bSB\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\/Fume\s+Blanc\b', re.IGNORECASE)),
('Meritage', re.compile(r'\bMeritage\b', re.IGNORECASE)),
('Fume', re.compile(r'\bFume\b|\bFumé', re.IGNORECASE)),
('Champagne', re.compile(r'\bChampagne\b', re.IGNORECASE)),
('Chard', re.compile(r'\bChar+d|\bCH\b', re.IGNORECASE)),
('Shiraz', re.compile(r'\bShiraz\b', re.IGNORECASE)),
('Syrah', re.compile(r'\bSyrah\b|\bSY\b',re.IGNORECASE)),
('Zin', re.compile(r'\bZinfandel\b|\bZIN\b|\bZN\b', re.IGNORECASE)),
('Rose', re.compile(r'\bRose\b|\bRosé', re.IGNORECASE)),
('Sangiovese', re.compile(r'\bSangiovese\b', re.IGNORECASE)),
# ('Brandy', re.compile(r'\bBrandy\b', re.IGNORECASE)),
('Gewurzt', re.compile(r'\bGew.rztraminer\b|\bGewürzt', re.IGNORECASE)),
('Malbec', re.compile(r'\bMalbec\b', re.IGNORECASE)),
('Viognier', re.compile(r'\bViognier\b', re.IGNORECASE)),
('Roussanne', re.compile(r'\bRoussanne\b', re.IGNORECASE)),
('Charbono', re.compile(r'\bCharbono\b', re.IGNORECASE)),
('PSirah', re.compile(r'\bPetite Sirah\b', re.IGNORECASE)),
('Cuvee', re.compile(r'\bCuvee\b', re.IGNORECASE)),
('Red', re.compile(r'\bRed\b|\bBordeaux\s+Blend\b', re.IGNORECASE)),
('Syrah-Cab', re.compile(r'\bSyrcab\b|\bsyrah[-\s\/]+cab', re.IGNORECASE)),
('Grenache', re.compile(r'\bGrenache\b', re.IGNORECASE)),
('Tempranillo', re.compile(r'\bTempranillo\b', re.IGNORECASE)),
)
# wineries that we don't want to look up the grape on
ignoreGrapeLookup = {
'Cristal' : ['Rose', None],
'Domaine Carneros' : ['Brut', None],
'Dominus' : [None],
'Papillon' : None,
'Paraduxx' : None,
'Veuve' : None,
'zCointreau' : None,
'zGin Hendrick' : None,
'zGin Tanqueray' : ['Ten', None],
'zTripleSec Gr Marnier' : ['1880', '100th', 'Cent', 'Quin', None],
'zTripleSec Dekuyper' : None,
'zTripleSec Hiram' : None,
'zVodka Skyy' : ['Citrus', None],
'zVodka Tito' : None,
# 'Prisoner' : ['Cuttings', 'Red', 'Derange', 'Saldo', 'Blindfold', None],
}
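# a minimal sketch of one plausible reading of the ignoreGrapeLookup values
# (the consuming code is not part of this truncated file): None means use the
# winery name alone; a list means try to match one of the listed names, with a
# trailing None meaning "no match" is also acceptable
def findIgnoreGrapeWine(winery, text):
    names = ignoreGrapeLookup.get(winery) or []
    for name in names:
        if name and re.search(r'\b' + re.escape(name) + r'\b', text, re.IGNORECASE):
            return name
    return None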
# winery to wine-name lookup used when no grape is found in the wine name
#
# when a row has no grape match, look up the winery here; the first listed
# wine name found in the wine field is the name used
noGrapeLookup = {
'Ehlers' : ['120-80'], # matches an abbreviation - and matches fldWineDescr
'Alban' : ['Pandora'],
'BV' : ['Tapestry', 'Latour'],
'Bennett Ln' : ['Maximus'],
'Bremer' : ['Austintatious'],
'Cain Five' : None,
'Colgin' : ['Cariad', 'IX'],
'Concha Don Melchor' : None,
'Continuum' : None,
'Darioush' : ['Duel', 'Darius'],
'Duckhorn' : ['Discussion'],
'Far Niente' : ['Dolce'],
'Flora' : ['Trilogy'],
'Franciscan' : ['Magnificat'],
'Grgich' : ['Violetta'],
'Gundlach' : ['Vintage Reserve'],
'Justin' : ['Isosceles'],
'Krug' : ['Generations'],
'Mondavi' : ['Maestro'],
'Newton' : ['Puzzle'],
'Opus One' : None,
'Phelps' : ['Insignia'],
'Prisoner' : ['Cuttings', 'Derange', 'Saldo', 'Blindfold'],
'Ridge' : ['Monte Bello'],
'Robert Foley' : ['Griffin'],
'Sullivan' : ['Coeur de Vigne'],
'Zaca' : ['ZThree', 'ZCuvee'],
'zCognac Courvoisier' : ['Napolean', 'VS', 'VSOP', 'XO'],
'zCognac Hennessy' : ['Paradis', 'Richard', 'VS', 'VSOP', 'XO', 'Master'],
'zCognac Remy' : ['1738', 'Louis XIII', 'VSOP', 'XO', 'VS'],
'zRum Ron Zacapa' : ['23', 'Negra', 'XO'],
'zRye Hayden' : ['Dark', 'Caribbean'],
'zScotch Hibiki Harmony' : None,
# 'zScotch Hibiki' : ['Toki', '12', '17', '21', '30'],
'zTeq Campo Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casamigos' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casino Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Silver'],
'zTeq Clase Azul' : ['Ultra', 'Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Mezcal', 'Plata', 'Platino'],
'zTeq Dos Artes' : ['Extra Anejo'],
'zTeq Gran Cava' : ['Extra Anejo'],
'zTeq Loma Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
# 'zTeq Padre Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Partida' : ['Blanco', 'Elegante'],
'zVodka Absolut' : ['Citron', 'Mandarin', 'Mandrin', 'Mango', 'Ruby', 'Vanilia', 'Raspberri', 'Grapevine', None],
'zWhiskey J Walker' : ['Double Black', 'Black', 'Blue', 'Gold', 'Green', 'Platinum', 'Red','Swing', 'White', '18', '21'],
}
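# a minimal sketch of consuming noGrapeLookup (illustrative; the driver is not
# shown in this truncated file and wineLookupByName is an assumed name):
def wineLookupByName(winery, text):
    if winery not in noGrapeLookup:
        return None
    names = noGrapeLookup[winery]
    if names is None:
        # None means the winery name by itself is the description
        return ''
    for name in names:
        if name and re.search(r'\b' + re.escape(name) + r'\b', text, re.IGNORECASE):
            return name
    return None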
# regex to use to determine if this is a liquor, not a wine
#
# winery -> [ (liquor, regex), ... ]
# if there is no grape and no noGrapeLookup entry, but the winery has a
# liquorLookup entry, use that list of lookups to find the additional
# information to add to the winery
#
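# a minimal sketch of applying liquorLookup (illustrative; the consuming code
# is not part of this truncated file):
def findLiquor(winery, text):
    # scan the winery's (liquor, regex) list in order; first match wins
    for liquor, pattern in liquorLookup.get(winery, []):
        if pattern.search(text):
            return liquor
    return None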
liquorLookup = {
'zRum Mt Gay' : [
('1703 Mst', re.compile(r'\b1703\b', re.IGNORECASE)),
('BB', re.compile(r'\bBlack Barrel\b', re.IGNORECASE)),
('Eclipse Silver', re.compile(r'\bEclipse\s+Silver\b', re.IGNORECASE)),
('Eclipse', re.compile(r'\bEclipse\b', re.IGNORECASE)),
('Old Peat', re.compile(r'\bOld Peat', re.IGNORECASE)),
('Old Pot', re.compile(r'\bPot\s+Still\b', re.IGNORECASE)),
('Old', re.compile(r'\bOld\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('XO Peat', re.compile(r'\bXO\b', re.IGNORECASE)),
],
'zScotch Glenmorangie' : [
('10', re.compile(r'\b10(YR)?\b', re.IGNORECASE)),
('14 Port', re.compile(r'14.+\bQuinta\b|14.+\bPort\b|\bQuinta\b.+14|\bPort\b.+14', re.IGNORECASE)),
('12 Bacalta', re.compile(r'\bBacalta\b', re.IGNORECASE)),
('12 Burgundy', re.compile(r'\bBurgundy\b', re.IGNORECASE)),
('12 Nectar', re.compile(r'\bNectar\b', re.IGNORECASE)),
('12 Port', re.compile(r'\bQuinta\b|\bPort\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\bLa\s?Santa\b|\bSherry\b', re.IGNORECASE)),
('12 Signet', re.compile(r'\bSignet\b', re.IGNORECASE)),
('15 Cadboll', re.compile(r'\bCadboll', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b|\b18YEAR\b', re.IGNORECASE)),
('25 Astar', re.compile(r'\bAstar\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b', re.IGNORECASE)),
('Companta', re.compile(r'\bCompanta\b', re.IGNORECASE)),
('Finealta', re.compile(r'\bFinealta\b', re.IGNORECASE)),
('Milsean', re.compile(r'\bMilsean\b', re.IGNORECASE)),
('Sonnalta', re.compile(r'\bSonnalta\b', re.IGNORECASE)),
],
'zScotch Macallan' : [
('10 Fine', re.compile(r'\bFine.*\b10\b|\b10.*Fine')),
('10', re.compile(r'\b10\b')),
('12 Double Gold', re.compile(r'\bDbl\b.*Gold|\bDouble\b.*Gold', re.IGNORECASE)),
('12 Double', re.compile(r'\bDouble\s.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12\s.*Double\b', re.IGNORECASE)),
('12 Double', re.compile(r'\bDbl\b|\bDouble\b', re.IGNORECASE)),
('12 Edition 1', re.compile(r'\bEdition\s.*1\b', re.IGNORECASE)),
('12 Edition 2', re.compile(r'\bEdition\s.*2\b', re.IGNORECASE)),
('12 Edition 3', re.compile(r'\bEdition\s.*3\b', re.IGNORECASE)),
('12 Edition 4', re.compile(r'\bEdition\s.*4\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\b12\s.*Sherry\b|\bSherry\b\s.*\b12', re.IGNORECASE)),
('12 Triple', re.compile(r'\b12(YR)?\s.*Triple\b', re.IGNORECASE)),
('12 Triple', re.compile(r'\bTriple\s.*12\b', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('15 Triple', re.compile(r'\b15(YR)?\s.*Triple\b|Triple.+\b15(YR)?\b', re.IGNORECASE)),
('15 Fine', re.compile(r'\b15(YR)?\b.*\bFine\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('17 Sherry', re.compile(r'\b17(YR)?\s.*Sherry\b', re.IGNORECASE)),
('17 Fine', re.compile(r'\b17(YR)?\b.*\bFine\b', re.IGNORECASE)),
('17', re.compile(r'\b17(YR)?\b', re.IGNORECASE)),
('18 Sherry', re.compile(r'\b18(YR)?\s.*Sherry\b|Sherry\b.*18', re.IGNORECASE)),
('18 Triple', re.compile(r'\b18(YR)?\s.*Triple\b|Triple.+\b18(YR)?\b', re.IGNORECASE)),
('18 Fine', re.compile(r'\b18(YR)?\b.*\bFine\b', re.IGNORECASE)),
('18 Gran', re.compile(r'Gran\b.*\b18', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b', re.IGNORECASE)),
('21 Fine', re.compile(r'\b21.*Fine\b', re.IGNORECASE)),
('21', re.compile(r'\b21(YR)?\b', re.IGNORECASE)),
('25 Sherry', re.compile(r'\b25\s.*Sherry\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b')),
('30 Sherry', re.compile(r'\b30\s.*Sherry', re.IGNORECASE)),
('30 Triple', re.compile(r'\b30(YR)?\s.*Triple\b|Triple.+\b30(YR)?\b', re.IGNORECASE)),
('30 Fine', re.compile(r'\b30(YR)?\b.*\bFine\b|Fine.*30', re.IGNORECASE)),
('30', re.compile(r'\b30(YR)?\b')),
('Rare', re.compile(r'\bRare\b', re.IGNORECASE)),
],
'zTeq Cuervo' : [
('Especial Gold', re.compile(r'\bEspecial\b.*Gold\b|Gold.*Especial', re.IGNORECASE)),
('Especial Blue', re.compile(r'\bEspecial\b.*Blue\b', re.IGNORECASE)),
('Especial', re.compile(r'\bEspecial\b', re.IGNORECASE)),
('Familia Platino', re.compile(r'\bPlatino\b', re.IGNORECASE)),
('Familia Anejo', re.compile(r'\bFamilia\b|\bReserva\b', re.IGNORECASE)),
('Gold', re.compile(r'\bGold\b', re.IGNORECASE)),
('Reposado Lagavulin', re.compile(r'\bReposado.*Lagavulin', re.IGNORECASE)),
('Tradicional Anejo', re.compile(r'Tradicional.*Anejo|Anejo.*Tradicional', re.IGNORECASE)),
('Tradicional Reposado', re.compile(r'Tradicional.*Reposado|Reposado.*Tradicional', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTradicional\b', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTraditional\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Don Julio' : [
('1942', re.compile(r'\b1942\b', re.IGNORECASE)),
('Real', re.compile(r'\bReal\b', re.IGNORECASE)),
('Anejo Claro 70th', re.compile(r'\b70th\b', re.IGNORECASE)),
('Anejo Claro', re.compile(r'\bAnejo\b\s*Claro\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
('Reposado Lagavulin', re.compile(r'\bRepo.+Lagvulin\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Double\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Dbl\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bDouble.+Reposado\b', re.IGNORECASE)),
('Reposado Private', re.compile(r'\bReposado.+Private\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Herradura' : [
('Ultra', re.compile(r'\bUltra\b', re.IGNORECASE)),
('Suprema', re.compile(r'\bSuprema\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
('Reposado Gold', re.compile(r'\bReposado\s+Gold\b|\bGold\s+Reposado\b', re.IGNORECASE)),
('Reposado Scotch', re.compile(r'\bReposado.+Scotch\b|\bScotch.+Reposado\b', re.IGNORECASE)),
('Reposado Port', re.compile(r'\bPort.+Reposado\b|\bReposado.+Port\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Patron' : [
('Gran Piedra', re.compile(r'\bPiedra\b', re.IGNORECASE)),
('DELETE Roca DELETE', re.compile(r'\bRoca\b', re.IGNORECASE)),
('Anejo Extra Lalique', re.compile(r'\bLalique\b', re.IGNORECASE)),
('Anejo Extra 7yr', re.compile(r'\b7YR\b|\b7 anos\b|\b7 year\b', re.IGNORECASE)),
('Anejo Extra 5yr', re.compile(r'\b5YR\b|\b5 anos\b|\b5 year\b', re.IGNORECASE)),
('Anejo Extra 10yr', re.compile(r'\b10\b.+\bExtra\b|\bExtra\b.+10', re.IGNORECASE)),
('Anejo Extra', re.compile(r'\bExtra\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bGran\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bBurdeos\b', re.IGNORECASE)),
('Gran Smoky', re.compile(r'\bGran\s+.*Smoky\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Gran Platinum', re.compile(r'\bPlatinum\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver LTD', re.compile(r'\bSilver.*Limited\b|\bLimited.*Silver\b', re.IGNORECASE)),
('Silver Estate', re.compile(r'\bEstate.*Silver\b|\bSilver.*Estate\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
],
'zTeq Padre Azul' : [
('Blanco', re.compile(r'\bsilver\b', re.IGNORECASE)),
],
'zWhiskey Balvenie' : [
('12 Double', re.compile(r'\bDouble.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12(YR)?\s.*Double', re.IGNORECASE)),
('12 First', re.compile(r'\b12(YR)?\s.*First', re.IGNORECASE)),
('12 USA', re.compile(r'\b12.*American|American.*12', re.IGNORECASE)),
('12 Toast', re.compile(r'\b12(YR)?\s.*Toast', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*Carib', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*CB\s+Cask', re.IGNORECASE)),
('14 Carib', re.compile(r'\bCarr?ib', re.IGNORECASE)),
('14 Peat', re.compile(r'\b14(YR)?\s.*Peat', re.IGNORECASE)),
('15 Sherry', re.compile(r'\b15(YR)?\s.*Sherry\b', re.IGNORECASE)),
('15 Sherry', re.compile(r'\bSherry\s+.*15(YR)?\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('16 Triple', re.compile(r'\b16(YR)?\s.*Triple\b', re.IGNORECASE)),
('17 Sherry Double', re.compile
# file: 920cd41b18f5cfb45f46c44ed707cebe682d4dd9 (Python)
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
'''
@author: clarkmatthew
extension of the boto instance class, with added convenience methods and objects
Adds common instance test routines to this class
Examples:
from eucaops import Eucaops
from nephoria.windows_instance import WinInstance
tester = Eucaops(credpath='eucarc-10.111.5.80-eucalyptus-sys_admin')
wins = WinInstance.make_euinstance_from_instance(tester.get_instances(idstring='i-89E13DA8')[0], tester=tester, keypair='test')
vol = tester.get_volume(status='available', zone=wins.placement)
wins.attach_volume(vol)
'''
import socket
import os
import re
import time
import copy
import types
import operator
from prettytable import PrettyTable, ALL
from boto.ec2.instance import Instance
from nephoria.aws.ec2.euvolume import EuVolume
from cloud_utils.log_utils import eulogger, get_line, markup
from nephoria.euca.taggedresource import TaggedResource
from boto.ec2.instance import InstanceState
from datetime import datetime
from cloud_utils.net_utils import winrm_connection
termline = get_line()
class WinInstanceDiskType():
gigabyte = 1073741824
megabyte = 1048576
def __init__(self, win_instance, wmic_dict):
self.check_dict_requires(wmic_dict)
self.__dict__ = self.convert_numbers_in_dict(copy.copy(wmic_dict))
self.win_instance = win_instance
self.size_in_gb = self.get_size_in_gb()
self.size_in_mb = self.get_size_in_mb()
self.size = long(self.size or 0)
self.last_updated = time.time()
self.setup()
def setup(self):
raise Exception('Not Implemented')
def check_dict_requires(self, wmic_dict):
raise Exception('Not Implemented')
def convert_numbers_in_dict(self, dict):
#convert strings representing numbers to longs
for key in dict:
value = str(dict[key])
if (re.search(r"\S", value) and not re.search(r"\D", value)):
dict[key] = long(dict[key])
return dict
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def get_size_in_gb(self):
'''
Attempts to convert self.size from bytes to gigabytes, rounding up a
remainder of >.99 to account for differences in how the size is represented
'''
self.size = int(self.size or 0)
gigs = self.size / self.gigabyte
if (self.size % self.gigabyte) /float(self.gigabyte) >.99:
gigs += 1
return gigs
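# worked example of the rounding rule above (illustrative values): a size of
# 1073741824 bytes is exactly 1 GB; 1072693248 bytes (~0.999 GB) also rounds
# up to 1 because the remainder ratio exceeds .99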
def get_size_in_mb(self):
'''
Attempts to convert self.size from bytes to megabytes, rounding up a
remainder of >.99 to account for differences in how the size is represented
'''
self.size = int(self.size or 0)
mb = self.size / self.megabyte
if (self.size % self.megabyte) /float(self.megabyte) >.99:
mb += 1
return mb
def print_self(self):
self.get_summary(printmethod=self.win_instance.debug)
def get_summary(self, printheader=True, printmethod=None):
raise Exception('Method not implemented')
def print_self_full(self, printmethod=None):
'''
formats and prints self.dict
'''
self.win_instance.print_dict(dict=self.__dict__, printmethod=printmethod)
class WinInstanceDiskDrive(WinInstanceDiskType):
def setup(self):
if not hasattr(self,'serialnumber'):
self.serialnumber = ''
if not hasattr(self, 'caption'):
self.caption = ''
if hasattr(self,'model'):
self.caption = self.model
else:
self.model = self.caption
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.update_ebs_info()
self.disk_partitions = []
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
('caption' in wmic_dict or 'model' in wmic_dict) and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, caption, and index')
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def update_md5_info_from_ebs(self):
self.md5 = None
self.md5len = None
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
if not vol.md5:
vol.md5len = 1024
vol.md5 = self.win_instance.get_dev_md5(self.cygwin_scsi_drive, vol.md5len)
self.md5 = vol.md5
self.md5len = vol.md5len
break
def update_ebs_info_from_serial_number(self):
'''
Attempts to parse the serial number field from an EBS volume and find the correlating ebs volume
example format: vol-81C13EA4-dev-sdg
'''
if re.match("^vol-", self.serialnumber):
split = self.serialnumber.split('-')
self.ebs_volume = str(split[0]) + "-" + str(split[1])
self.ebs_cloud_dev = "/" + str(split[2]) + "/" + str(split[3])
else:
self.ebs_volume = ''
self.ebs_cloud_dev = ''
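# worked example: serialnumber 'vol-81C13EA4-dev-sdg' parses to
# ebs_volume 'vol-81C13EA4' and ebs_cloud_dev '/dev/sdg'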
def update_ebs_info(self):
self.update_ebs_info_from_serial_number()
if not self.ebs_volume:
if self.index == 0 and self.win_instance.root_device_type == 'ebs':
bdm = self.win_instance.block_device_mapping[self.win_instance.root_device_name]
self.ebs_volume = bdm.volume_id
else:
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
self.ebs_volume = vol.id
break
if not self.ebs_cloud_dev and self.ebs_volume:
volume = self.win_instance.tester.get_volume(volume_id=self.ebs_volume)
if hasattr(volume,'attach_data') and volume.attach_data:
self.ebs_cloud_dev = volume.attach_data.device
self.update_md5_info_from_ebs()
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 20
size = 16
sizegb = 7
ebsvol = 12
serialnumber = 24
caption = 36
part_count = 6
logical_ids = 8
cygdrive = 10
md5 = 32
header = "DISKDRIVE DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"EBS VOL".center(ebsvol) + "|" + \
"CAPTION".center(caption) + "|" + \
"PARTS".center(part_count) + "|" + \
"LOGICAL".center(logical_ids) + "|" + \
"CYGDRIVE".center(cygdrive) + "|" + \
"SERIAL NUMBER".center(serialnumber) + "|" + \
"MD5 CHECK SUM".center(md5) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.ebs_volume).center(ebsvol) + "|" + \
str(self.caption).center(caption) + "|" + \
str(self.partitions).center(part_count) + "|" + \
str(",".join(str(x) for x in self.get_logicaldisk_ids())).center(logical_ids) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|" + \
str(self.serialnumber).center(serialnumber) + "|" + \
str(self.md5).center(md5) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceDiskPartition(WinInstanceDiskType):
def setup(self):
#self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(drive_id=self.deviceid)
self.logicaldisks = []
#Set values in case 'brief' was used when fetching partitions
if not hasattr(self,'deviceid'):
self.deviceid = self.name
if not hasattr(self,'bootable'):
self.bootable = self.bootpartition
if not hasattr(self,'diskindex'):
self.diskindex = self.get_disk_index_from_name()
def check_dict_requires(self, wmic_dict):
if not ('name' in wmic_dict and
'size' in wmic_dict and
'bootpartition' in wmic_dict and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; name, size, bootpartition and index')
def get_disk_index_from_name(self):
diskindex = None
diskindexstring = self.name.split(',')[0]
if re.search('disk', diskindexstring, re.IGNORECASE):
diskindex = int(diskindexstring.split('#')[1])
return diskindex
def get_logicaldisk_ids(self):
retlist = []
for disk in self.logicaldisks:
retlist.append(disk.deviceid)
return retlist
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
sizegb = 12
sizemb = 12
bootable = 10
header = "PARTITION DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"SIZE MB".center(sizemb) + "|" + \
"BOOTABLE".center(bootable) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.size_in_mb).center(sizemb) + "|" + \
str(self.bootable).center(bootable) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceLogicalDisk(WinInstanceDiskType):
def setup(self):
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.partition = None
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
'description' in wmic_dict and
'freespace' in wmic_dict and
'filesystem' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, description, freespace and filesystem')
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
freespace = 16
filesystem = 24
description = 30
cygdrive = 10
header = "LOGICAL DEV ID".center(deviceid) + "|" + \
"SIZE".center(size) + "|" + \
"FREE SPACE".center(freespace) + "|" + \
"FILE SYSTEM".center(filesystem) + "|" + \
"DESCRIPTION".center(description) + "|" + \
"CYGDRIVE".center(cygdrive) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.freespace).center(freespace) + "|" + \
str(self.filesystem).center(filesystem) + "|" + \
str(self.description).center(description) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstance(Instance, TaggedResource):
gigabyte = 1073741824
megabyte = 1048576
@classmethod
def make_euinstance_from_instance(cls,
instance,
tester,
debugmethod = None,
keypair=None,
keypath=None,
password=None,
username="Administrator",
auto_connect = True,
verbose=True,
timeout=120,
private_addressing = False,
reservation = None,
cmdstart=None,
try_non_root_exec=True,
winrm_port='5985',
winrm_protocol='http',
rdp_port='3389',
rootfs_device = "sda",
block_device_prefix = "sd",
bdm_root_vol = None,
virtio_blk = True,
cygwin_path = None,
disk_update_interval=10,
retry=2,
brief=False
):
'''
Primary constructor for this class. Note: to avoid creating an ssh session within this method, provide keys and username/password later.
Arguments:
instance - mandatory- a Boto instance object used to build this euinstance object
keypair - optional- a boto keypair object used for creating ssh connection to the instance
username - optional- string used to create ssh connection as an alternative to keypair
password - optional- string used to create ssh connection to this instance as an alternative to keypair
exec_password -optional -string used for su or sudo where prompted for password, will default to 'password'
auto_connect -optional -boolean, if True will attempt to automatically create an ssh session for this instance
try_non_root_exec -optional -boolean, if True will attempt to use sudo if available else su -c to execute privileged commands
timeout - optional- integer used for ssh connection timeout
debugmethod - optional - method, used for debug output
verbose - optional - boolean to determine if debug is to be printed using debug()
retry - optional - integer, ssh connection attempts for non-authentication failures
'''
newins = WinInstance(instance.connection)
newins.__dict__ = instance.__dict__
newins.tester = tester
newins.winrm_port = winrm_port
newins.rdp_port = rdp_port
newins.bdm_root_vol = None
newins.winrm_protocol = winrm_protocol
newins.debugmethod = debugmethod
if newins.debugmethod is None:
newins.log = eulogger.Eulogger(identifier= str(instance.id))
newins.debugmethod= newins.log.debug
if (keypair is not None):
if isinstance(keypair,types.StringTypes):
keyname = keypair
keypair = tester.get_keypair(keyname)
else:
keyname = keypair.name
newins.keypath = keypath or os.getcwd() + "/" + keyname + ".pem"
newins.keypair = keypair
newins.password = password
newins.username = username
newins.verbose = verbose
newins.attached_vols=[]
newins.timeout = timeout
newins.virtio_blk = virtio_blk
newins.disk_update_interval = disk_update_interval
newins.retry = retry
newins.brief = brief
newins.rootfs_device = rootfs_device
newins.block_device_prefix = block_device_prefix
newins.private_addressing = private_addressing
newins.reservation = reservation or newins.get_reservation()
if newins.reservation:
newins.security_groups = newins.tester.get_instance_security_groups(newins)
else:
newins.security_groups = None
newins.laststate = newins.state
newins.cmdstart = cmdstart
newins.auto_connect = auto_connect
newins.set_last_status()
newins.update_vm_type_info()
newins.cygwin_path = cygwin_path
newins.system_info = None
newins.diskdrives = []
newins.disk_partitions = []
newins.logicaldisks = []
newins.cygwin_dev_map = {}
#newins.set_block_device_prefix()
if newins.root_device_type == 'ebs':
try:
volume = newins.tester.get_volume(volume_id = newins.block_device_mapping.get(newins.root_device_name).volume_id)
newins.bdm_root_vol = EuVolume.make_euvol_from_vol(volume, tester=newins.tester,cmdstart=newins.cmdstart)
except:pass
newins.winrm = None
if newins.auto_connect and newins.state == 'running':
newins.connect_to_instance(timeout=timeout)
return newins
@property
def age(self):
launchtime = self.tester.get_datetime_from_resource_string(self.launch_time)
# return the elapsed time in seconds
return (time.mktime(datetime.utcnow().utctimetuple()) -
time.mktime(launchtime.utctimetuple()))
def update(self, validate=False, dry_run=False,
err_state='terminated', err_code=-1):
ret = None
tb = ""
retries = 2
for x in xrange(0, retries):
try:
#send with validation True, fail later...
ret = super(WinInstance, self).update(validate=True,
dry_run=dry_run)
break
except ValueError:
if validate:
raise
tb = self.tester.get_traceback()
self.debug('Failed to update instance. Attempt:{0}/{1}'
.format(x, retries))
if not ret:
failmsg = 'Failed to update instance. Instance may no longer '\
'be present on system "{0}"'.format(self.id)
self.debug('{0}\n{1}'.format(tb, failmsg))
self.debug('{0} setting fake state to:"{1}"'.format(self.id,
err_state))
state = InstanceState(name=err_state, code=err_code)
self._state = state
ret = self.state
self.set_last_status()
return ret
def update_vm_type_info(self):
self.vmtype_info = self.tester.get_vm_type_from_zone(self.placement,self.instance_type)
return self.vmtype_info
def set_last_status(self,status=None):
self.laststate = self.state
self.laststatetime = time.time()
self.age_at_state = self.tester.get_instance_time_launched(self)
#Also record age from user's perspective, ie when they issued the run instance request (if this is available)
if self.cmdstart:
self.age_from_run_cmd = "{0:.2f}".format(time.time() - self.cmdstart)
else:
self.age_from_run_cmd = None
def print_dict(self, dict=None, printmethod=None):
'''
formats and prints
'''
printmethod = printmethod or self.debug
buf = "\n"
dict = dict or self.__dict__
longest_key = 0
for key in dict:
if len(key) > longest_key:
longest_key = len(key)
for key in dict:
buf += str(key).ljust(longest_key) + " -----> :" + str(dict[key]) + "\n"
printmethod(buf)
def printself(self, title=True, footer=True, printmethod=None, printme=True):
def state_markup(state):
# Markup instance state...
if state == 'running':
return markup(state, markups=[1, 92])
if state == 'terminated':
return markup(state, markups=[1, 97])
if state =='shutting-down':
return markup(state, markups=[1, 95])
if state == 'pending':
return markup(state, markups=[1, 93])
if state =='stopped':
return markup(state, markups=[1, 91])
else:
return markup(state, markups=[1, 91])
def multi_line(lines):
# Utility method for creating multi line table entries...
buf = ""
maxlen = 0
for line in lines:
if len(line) + 2 > maxlen:
maxlen = len(line) + 2
for line in lines:
buf += str(line).ljust(maxlen) + "\n"
buf = buf.rstrip()
return (buf, maxlen)
bdmvol = self.root_device_type
if self.bdm_root_vol:
bdmvol += ":" + self.bdm_root_vol.id
reservation_id = None
if self.reservation:
reservation_id = self.reservation.id
owner_id = self.reservation.owner_id
else:
owner_id = "???"
# Create a multi line field for instance's run info
idlist = [markup("{0} {1}".format('ID:', self.id), markups=[1, 4, 94]),
"{0} {1}".format(markup('TYPE:'), self.instance_type),
"{0} {1}".format(markup('RES:'), reservation_id),
"{0}".format(markup("ACCOUNT ID:")), owner_id]
id_string, idlen = multi_line(idlist)
try:
emi = self.tester.get_emi(self.image_id)
emi_name = str(emi.name[0:18]) + ".."
except:
emi_name = ""
# Create a multi line field for the instance's image info
virt_type = 'PV'
if self.virtualization_type == 'hvm':
virt_type = 'HVM'
emi_string, emilen = multi_line(
[markup("{0} {1}".format('EMI:', self.image_id)),
"{0} {1}".format(markup('OS:'), self.platform or 'linux'),
"{0} {1}".format(markup('VIRT:'), virt_type),
"{0}".format(markup('IMAGE NAME:')),
emi_name])
# Create a multi line field for the instance's state info
age = int(self.age or 0)
state_string, state_len = multi_line(["STATE: " + state_markup(self.laststate),
"{0} {1}".format(markup('AGE:'), age),
"{0} {1}".format(markup("ZONE:"), self.placement),
markup('ROOTDEV:'), bdmvol])
# Create the primary table called pt...
netinfo = 'INSTANCE NETWORK INFO:'
idheader = 'INSTANCE ID'
imageheader = 'INSTANCE IMAGE'
stateheader = 'INSTANCE STATE'
pt = PrettyTable([idheader, imageheader, stateheader, netinfo])
pt.align[netinfo] = 'l'
pt.valign[netinfo] ='m'
pt.align[idheader] = 'l'
pt.align[imageheader] = 'l'
pt.align[stateheader] = 'l'
pt.max_width[idheader] = idlen
pt.max_width[imageheader] = emilen
pt.max_width[stateheader] = state_len
pt.padding_width = 0
pt.hrules = ALL
# PrettyTable headers do not work with ascii markups, so make a pseudo header
new_header = []
for field in pt._field_names:
new_header.append(markup(field, markups=[1, 4]))
pt.add_row(new_header)
pt.header = False
# Create a subtable 'netpt' to summarize and format the networking portion...
# Set the maxwidth of each column so the tables line up when showing multiple instances
vpc_col = ('VPC', 4)
subnet_col = ('SUBNET', 6)
if self.vpc_id:
vpc_col = ('VPC', 12)
subnet_col = ('SUBNET', 15)
secgrp_col = ('SEC GRPS', 11)
privaddr_col = ('P', 1)
privip_col = ('PRIV IP', 15)
pubip_col = ('PUB IP', 15)
net_cols = [vpc_col, subnet_col, secgrp_col, privaddr_col, privip_col, pubip_col]
# Get the Max width of the main tables network summary column...
# Start with 2 to account for beginning and end column borders
netinfo_width = 2
netinfo_header = []
for col in net_cols:
netinfo_width += col[1] + 1
netinfo_header.append(col[0])
pt.max_width[netinfo] = netinfo_width
netpt = PrettyTable([vpc_col[0], subnet_col[0], secgrp_col[0], privaddr_col[0],
privip_col[0], pubip_col[0]])
netpt.padding_width = 0
netpt.vrules = ALL
for col in net_cols:
netpt.max_width[col[0]] = col[1]
sec_grps = []
for grp in self.groups:
sec_grps.append(str(grp.id))
sec_grps = ",".join(sec_grps)
private_addressing = "N"
if self.private_addressing:
private_addressing = "Y"
netpt.add_row([str(self.vpc_id).center(vpc_col[1]),
str(self.subnet_id).center(subnet_col[1]),
str(sec_grps).center(secgrp_col[1]),
str(private_addressing).center(privaddr_col[1]),
str(self.private_ip_address).center(privip_col[1]),
str(self.ip_address).center(pubip_col[1])])
# To squeeze a potentially long keyname under the network summary table, get the length
# and format this column to allow for wrapping a keyname under the table...
# netbuf = netpt.get_string()
netbuf = "{0}:{1} {2}:{3}\n".format(markup("NODE"),
self.tags.get('euca:node', "???").ljust(16),
markup("KEYPAIR"), self.key_name)
netbuf += "\n".join(netpt.get_string().splitlines()[0:-1])
# Create the row in the main table...
pt.add_row([id_string, emi_string, state_string, netbuf])
if printme:
printmethod = printmethod or self.log.debug
printmethod("\n" + str(pt) + "\n")
return pt
def get_password(self,
private_key_path=None,
key=None,
dir=None,
exten=".pem",
encoded=True,
force_update=False):
'''
:param private_key_path: private key file used to decrypt password
:param key: name of private key
:param dir: Path to private key
:param exten: extension of private key
:param encoded: boolean of whether string returned from server is
Base64 encoded
:return: decrypted password
'''
if self.password is None or force_update:
self.password = self.tester.get_windows_instance_password(
self,
private_key_path=private_key_path,
key=key,
dir=dir,
exten=exten,
encoded=encoded)
return self.password
def reset_ssh_connection(self, timeout=None):
# todo: Remove ssh reference from this method, use something like
# reset_instance_connection, etc..
self.debug('Note ssh not implemented at this time, using winrm for '
'shell access instead...')
return self.reset_winrm_connection(timeout=timeout)
def reset_winrm_connection(self, timeout=None, force=False):
# todo:
timeout = timeout or self.timeout
self.debug('reset_winrm_connection for:'+str(self.id))
self.get_password(force_update=True)
if self.username is None or self.password is None:
#Allow but warn here as this may be a valid negative test
self.debug('Warning username and/or password were None in '
'winrm connection?')
# Create a new winrm interface if this is a new instance or
# an attribute has changed...
try:
#Check the port in order to provide debug if the connection fails
self.test_port_status(port=self.winrm_port, ip=self.ip_address)
except:pass
if force or not (self.winrm and \
self.winrm.hostname == self.ip_address and \
self.winrm.username == self.username and \
self.winrm.password == self.password):
if self.winrm:
self.winrm.close_shell()
self.winrm = winrm_connection.Winrm_Connection(
hostname = self.ip_address,
username = self.username,
password = self.password,
port = self.winrm_port,
protocol = self.winrm_protocol,
debug_method = self.debug,
verbose=True
)
def get_reservation(self):
res = None
try:
res = self.tester.get_reservation_for_instance(self)
except Exception, e:
self.update()
self.debug('Could not get reservation for instance in state:' +
str(self.state) + ", err:" + str(e))
return res
def connect_to_instance(self, wait_for_boot=180, timeout=120):
'''
Attempts to connect to an instance via winrm.
:params wait_for_boot: time to wait, allowing guest to boot before
attempting to poll for ports active status
:params timeout: -optional - time in seconds to wait when polling
port(s) status(s) before failure
'''
self.debug("{0}connect_to_instance starting.\nwait_for_boot:{1} "
"seconds\ntimeout from boot:{2}{3}"
.format(termline, wait_for_boot, timeout, termline))
try:
self.poll_for_port_status_with_boot_delay(waitforboot=wait_for_boot,
timeout=timeout)
except Exception, e:
self.debug('Warning failed to poll port status:' + str(e))
self.debug("Attempting to create connection to instance:" + self.id)
attempts = 0
start = time.time()
elapsed = 0
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
while (elapsed < timeout):
attempts += 1
try:
self.update()
self.reset_winrm_connection()
self.debug('Try some sys...')
self.sys("whoami")
except Exception, se:
tb = self.tester.get_traceback()
self.debug('Caught exception attempting to connect '
'winrm shell:\n'+ str(tb) + str(se))
elapsed = int(time.time()-start)
self.debug('connect_to_instance: Attempts:' + str(attempts) +
', elapsed:'+str(elapsed)+'/'+str(timeout))
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
time.sleep(5)
pass
else:
break
elapsed = int(time.time()-start)
if self.winrm is None:
self.get_connection_debug()
raise RuntimeError(str(self.id) +
":Failed establishing management connection to "
"instance, elapsed:" + str(elapsed) +
"/" + str(timeout))
self.debug('Connect_to_instance updating attached volumes/disk '
'info for vols:'+ str(self.attached_vols))
if self.brief:
self.update_system_info()
else:
self.update_system_and_disk_info()
self.init_attached_volumes()
self.debug("{0}connect_to_instance completed{1}"
.format(termline, termline))
def get_connection_debug(self):
# Add network debug/diag info here...
# First show arp cache from local machine
# todo Consider getting info from relevant euca components:
# - iptables info
# - route info
# - instance xml
try:
# Show local ARP info...
arp_out = "\nLocal ARP cache for instance ip: " \
+ str(self.ip_address) + "\n"
arp_fd = os.popen('arp ' + str(self.ip_address))
for line in arp_fd:
arp_out += line
self.debug(arp_out)
except Exception as AE:
self.log.debug('Failed to get arp info:' + str(AE))
try:
self.tester.get_console_output(self)
except Exception as CE:
self.log.debug('Failed to get console output:' + str(CE))
def update_root_device_diskdrive(self):
if not self.root_device_type == 'ebs':
return
for disk in self.diskdrives:
if disk.index == 0:
if disk.ebs_volume:
for vol in self.attached_vols:
if vol.id == disk.ebs_volume:
if not disk.md5:
disk.update_md5_info_from_ebs()
return
volume = self.tester.get_volume(volume_id=disk.ebs_volume)
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume, self.tester)
volume.guestdev = disk.deviceid
volume.md5len = 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not self.get_volume_from_attached_list_by_id(volume.id):
self.debug("{0} updating with root vol:{1}{2}"
.format(termline,
volume.id,
termline))
self.attached_vols.append(volume)
disk.update_md5_info_from_ebs()
return
def get_volume_from_attached_list_by_id(self, volume_id):
for vol in self.attached_vols:
if vol.id == volume_id:
return vol
def update_system_and_disk_info(self):
try:
self.update_system_info()
except Exception, sie:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating system info:" + str(sie))
try:
self.update_disk_info()
self.update_root_device_diskdrive()
self.print_partition_summary()
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
except Exception, ude:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating disk info:" + str(ude))
def has_sudo(self):
return False
def debug(self,msg,traceback=1,method=None,frame=False):
'''
Used to print debug output; routed through self.debugmethod when self.verbose is True
msg - mandatory - string, message to be printed
'''
if ( self.verbose is True ):
self.debugmethod(msg)
def sys(self, cmd, verbose=True, code=None, include_stderr=False, enable_debug=False, timeout=None):
'''
Issues a command over the winrm connection to this instance
Returns a list of the lines from stdout+stderr as a result of the command
cmd - mandatory - string, the command to be executed
verbose - optional - boolean flag to enable debug
timeout - optional - command timeout in seconds
'''
if (self.winrm is None):
raise Exception("WinInstance winrm connection is None")
return self.winrm.sys(command=cmd, include_stderr=include_stderr, timeout=timeout, verbose=verbose, code=code)
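    # A minimal usage sketch of sys() (command and expected exit code are
    # illustrative; assumes the winrm connection is already established):
    #
    #   output = instance.sys('ipconfig /all', code=0, timeout=60)
    #   for line in output:
    #       print line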
def test_rdp_port_status(self, ip=None, port=3389, timeout=10):
'''
Description: Attempts to test that the host is accepting tcp connections to the RDP port
'''
ip = ip or self.ip_address
return self.test_port_status(ip=ip, port=port, timeout=timeout)
def test_port_status(self, port, ip=None, timeout=5, tcp=True, verbose=True):
ip = ip or self.ip_address
return self.tester.test_port_status(ip, int(port), timeout=timeout, tcp=tcp, verbose=verbose)
def poll_for_port_status_with_boot_delay(self, interval=15, ports=[], socktimeout=5,timeout=180, waitforboot=300):
'''
        Make sure enough time has passed since launch before running the guest-side port tests...
'''
launch_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if launch_seconds > waitforboot else (waitforboot - launch_seconds)
self.debug("Instance was launched "+str(launch_seconds)+" seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
time.sleep(sleeptime)
return self.poll_for_ports_status(ports,
ip=self.ip_address,
interval=interval,
socktimeout=socktimeout,
timeout=timeout)
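    # Hedged example (timeout values illustrative): wait out the boot window,
    # then poll the default RDP/WinRM ports defined on this instance:
    #
    #   instance.poll_for_port_status_with_boot_delay(timeout=300,
    #                                                 waitforboot=300)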
def wait_for_time_since_launch(self,waitforboot=420):
'''
When using larger instance store images, this can allow for the delays caused by image size/transfer.
'''
boot_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if boot_seconds > waitforboot else (waitforboot - boot_seconds)
self.debug("Instance was launched "+str(boot_seconds)+"/"+str(waitforboot) + " seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
start = time.time()
elapsed = 0
print "Waiting for Windows to fully boot:",
while elapsed < sleeptime:
print "Waiting for Windows to fully boot:"+str(sleeptime-elapsed),
time.sleep(5)
elapsed=int(time.time()-start)
self.debug("test_wait_for_instance_boot: done waiting, instance up for "+str(waitforboot)+" seconds")
def poll_for_ports_status(self, ports=[], ip=None, interval=10, socktimeout=5, timeout=180):
ip = ip or self.ip_address
ports = ports or [self.rdp_port, self.winrm_port]
start = time.time()
elapsed = 0
attempt = 0
while elapsed < timeout:
attempt +=1
self.debug('test_poll_for_ports_status, ports:'+ ",".join(str(x) for x in ports) + ", attempt:" + str(attempt))
for port in ports:
if elapsed < timeout:
try:
self.debug('Trying ip:port:' + str(self.ip_address) + ':' + str(port) + ", elapsed:" + str(elapsed))
self.test_port_status(ip=ip, port=int(port), timeout=5)
return
except socket.error, se:
self.debug('test_ports_status failed socket error:'+str(se[0]))
#handle specific errors here, for now just for debug...
ecode=se[0]
if ecode == socket.errno.ETIMEDOUT or ecode == "timed out":
self.debug("test_poll_for_ports_status: Connect "+str(ip)+":" +str(port)+ " timed out retrying. Time remaining("+str(timeout-elapsed)+")")
except Exception, e:
tb = self.tester.get_traceback()
self.debug(tb)
self.debug('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+', err:'+str(e) )
elapsed = int(time.time() -start)
if elapsed < timeout:
time.sleep(interval)
raise Exception('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+' seconds')
def init_attached_volumes(self):
        self.debug('init_attached_volumes... attached_vols:' + str(self.attached_vols))
syncdict = self.sync_attached_volumes_with_clouds_view()
if syncdict['errors']:
errmsg = 'Errors syncing guest volumes with cloud at init:' + ",".join(str(e) for e in syncdict['errors'])
            errmsg += '\nFailed to sync guest volumes with cloud at init:' + ",".join(str(x) for x in syncdict['badvols'])
self.debug(errmsg)
time.sleep(60)
raise Exception(errmsg)
def sync_attached_volumes_with_clouds_view(self):
self.debug(termline +
"Starting sync_attached_volumes_with_clouds_view"
+ termline )
badvols = []
errors = []
ret = {'errors':errors, 'badvols':badvols}
#Get a list of volumes that the cloud believes are currently attached
cloud_volumes = self.tester.get_volumes(attached_instance=self.id)
        #Make a copy of a list of volumes this instance thinks are currently attached
locallist = copy.copy(self.attached_vols)
self.debug('Cloud list:' + str(cloud_volumes))
self.debug('Local list:' + str(locallist))
for vol in cloud_volumes:
for local_vol in locallist:
if local_vol.id == vol.id:
locallist.remove(local_vol)
if not isinstance(vol, EuVolume):
vol = EuVolume.make_euvol_from_vol(vol, self.tester)
try:
self.update_volume_guest_info(volume=vol)
except Exception, e:
badvols.append(vol)
                        errors.append(vol.id + ' Error syncing with cloud:' + str(e) + '. \n')
for local_vol in locallist:
badvols.append(local_vol)
            errors.append(local_vol.id + ' Error: unattached volume found in guest attach list. \n')
self.debug(termline +
"Finishing sync_attached_volumes_with_clouds_view"
+ termline )
return ret
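    # Sketch of consuming the dict returned above (volume ids hypothetical):
    #
    #   syncdict = instance.sync_attached_volumes_with_clouds_view()
    #   for bad in syncdict['badvols']:
    #       print 'volume out of sync with cloud:', bad.id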
def update_system_info(self):
'''
Gather basic system info for this windows instance object and store in self.system_info
Example:
# print wins.system_info.OS_NAME
'Microsoft Windows 7 Professional'
'''
currentkey = None
swap = re.compile('([!@#$%^&*. ])')
info = self.sys('systeminfo')
if self.system_info:
system_info = self.system_info
else:
system_info = type('obj', (object,),{})
if info:
for line in info:
if re.match("^\w.+:", line):
linevals = line.split(':')
currentkey = linevals.pop(0)
#clean up the key string...
currentkey = re.sub('[()]', '', currentkey)
currentkey = re.sub(swap, '_', currentkey)
currentkey = currentkey.lower()
value = ":".join(str(x) for x in linevals) or ""
setattr(system_info, currentkey, str(value).strip())
elif currentkey:
#this is an additional value to our previous key
                    prev_value = getattr(system_info, currentkey)
                    if not isinstance(prev_value, types.ListType):
                        prev_value = [prev_value]
                    prev_value.append(str(line).strip())
                    setattr(system_info, currentkey, prev_value)
self.system_info = system_info
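    # Example of the key normalization performed above: a systeminfo line
    # such as "Total Physical Memory: 4,096 MB" has parentheses removed, the
    # swap-regex characters replaced with underscores, and is lowercased:
    #
    #   wins.system_info.total_physical_memory   # -> '4,096 MB'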
def get_cygwin_path(self, prefix="c:\\"):
if self.cygwin_path:
return self.cygwin_path
path = None
self.debug('Trying to find cygwin path...')
out = self.sys('dir'+
# system
import os
import numpy as np
import random
import copy
import time
# ROS
import rospy
import std_msgs.msg
import sensor_msgs.msg
import geometry_msgs.msg
import visualization_msgs.msg
import tf2_ros
import rosbag
import actionlib
from actionlib_msgs.msg import GoalStatus
import ros_numpy
# spartan ROS
import spartan_grasp_msgs.msg
import spartan_grasp_msgs.srv
import pdc_ros_msgs.msg
import fusion_server.msg
import fusion_server.srv
# spartan
import spartan.utils.utils as spartanUtils
import spartan.utils.ros_utils as rosUtils
import spartan.utils.director_utils as director_utils
import spartan.utils.control_utils as control_utils
from spartan.manipulation.schunk_driver import SchunkDriver
import fusion_server
from fusion_server.srv import *
import spartan.manipulation.gripper
from spartan.poser.poser_visualizer import PoserVisualizer
from spartan.manipulation.grasp_data import GraspData
from spartan.manipulation.object_manipulation import ObjectManipulation
from spartan.manipulation.category_manipulation_type import CategoryManipulationType
from spartan.utils.director_ros_visualizer import DirectorROSVisualizer
# director
from director import transformUtils
from director import visualization as vis
import director.objectmodel as om
import director.vtkNumpy as vnp
from director.debugVis import DebugData
import director.vtkAll as vtk
import director.segmentation as segmentation
import director.filterUtils as filterUtils
USING_DIRECTOR = True
if USING_DIRECTOR:
from spartan.utils.taskrunner import TaskRunner
MUG_RACK_CONFIG_FILE = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/manipulation/mug_rack.yaml")
# If USE_DEBUG_SPEED is True, joint speed is limited to DEBUG_SPEED
DEBUG_SPEED = 20 # degrees per second
USE_DEBUG_SPEED = False
MANIP_TYPE = CategoryManipulationType.SHOE_ON_RACK
# MANIP_TYPE = CategoryManipulationType.MUG_ON_SHELF_3D
EXPERIMENT_MODE = True
class GraspSupervisorState(object):
STATUS_LIST = ["ABOVE_TABLE", "PRE_GRASP", "GRASP", "IK_FAILED", "NO_GRASP_FOUND", "GRASP_FOUND", "OBJECT_IN_GRIPPER", "GRASP_FAILED", "SAFETY_CHECK_FAILED", "PLANNING_FAILED", "FAILED"]
def __init__(self):
self.setPickFront()
self.clear()
def setPickFront(self):
self.graspingLocation = "front"
self.stowLocation = "left"
def setPickLeft(self):
self.graspingLocation = "left"
self.stowLocation = "front"
@property
def grasp_data(self):
return self._grasp_data
@grasp_data.setter
def grasp_data(self, value):
"""
:param value: GraspData
:return:
"""
self._grasp_data = value
@property
def cache(self):
return self._cache
def clear(self):
"""
Clear any stateful elements of the state
:return:
"""
self._grasp_data = None
self._status = None
self._cache = dict()
self._trajectory_result = None
def clear_cache(self):
"""
Clears only the cache
:return:
"""
self._cache = dict()
def set_status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
@property
def status(self):
return self._status
@status.setter
def status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
def set_status_ik_failed(self):
self.status = "IK_FAILED"
def print_status(self):
"""
Prints the status
:return:
"""
if self._status is None:
print "Current Status: None"
else:
print "Current Status: " + self._status
class GraspSupervisor(object):
def __init__(self, graspingParamsFile=None, cameraSerialNumber="carmine_1", tfBuffer=None):
self.graspingParamsFile = graspingParamsFile
self.reloadParams()
self.cameraSerialNumber = cameraSerialNumber
self.cameraName = 'camera_' + str(cameraSerialNumber)
self.pointCloudTopic = '/' + str(self.cameraName) + '/depth/points'
self.rgbImageTopic = '/' + str(self.cameraName) + '/rgb/image_rect_color'
self.depthImageTopic = '/' + str(self.cameraName) + '/depth_registered/sw_registered/image_rect'
self.camera_info_topic = '/' + str(self.cameraName) + '/rgb/camera_info'
self.graspFrameName = 'base'
self.ggcnn_grasp_frame_camera_axes_id = "ggcnn_grasp"
self.depthOpticalFrameName = self.cameraName + "_depth_optical_frame"
self.rgbOpticalFrameName = self.cameraName + "_rgb_optical_frame"
self.state = GraspSupervisorState()
self.robotService = rosUtils.RobotService.makeKukaRobotService()
self.robotService._use_debug_speed = USE_DEBUG_SPEED
self.robotService._debug_speed = DEBUG_SPEED
self.usingDirector = True
self.tfBuffer = tfBuffer # don't create a new one if it is passed in
self.setupConfig()
self._grasp_point = None # stores the grasp point to be used in grasp3DLocation
self._cache = dict()
self._gripper = spartan.manipulation.gripper.Gripper.make_schunk_gripper()
self._poser_visualizer = PoserVisualizer.make_default()
self.poser_result = None
self._object_manipulation = None
self._category_manip = None # can be assigned later as needed
self._shoe_manipulation_counter = 0
        filename = os.path.join(spartanUtils.getSpartanSourceDir(), 'src/catkin_projects/station_config/RLG_iiwa_1/stored_poses.yaml')
self._stored_poses_director = spartanUtils.getDictFromYamlFilename(filename)
if USING_DIRECTOR:
self.taskRunner = TaskRunner()
self.taskRunner.callOnThread(self.setup)
else:
self.setup()
self.debugMode = False
if self.debugMode:
print "\n\n----------WARNING GRASP SUPERVISOR IN DEBUG MODE----------\n"
# if self.debugMode:
# self.pointCloudListMsg = GraspSupervisor.getDefaultPointCloudListMsg()
def reloadParams(self):
self.graspingParams = spartanUtils.getDictFromYamlFilename(self.graspingParamsFile)
def setup(self):
self.setupSubscribers()
self.setupPublishers()
self.setupTF()
self.setupROSActions()
self.gripperDriver = SchunkDriver()
self.setup_visualization()
def _clear_cache(self):
"""
Clears our local cache of variables
:return:
"""
self._cache = dict()
def setupDirector(self):
self.taskRunner.callOnThread(self.setup)
def setupConfig(self):
self.config = dict()
self.config['base_frame_id'] = "base"
self.config['end_effector_frame_id'] = "iiwa_link_ee"
self.config['pick_up_distance'] = 0.25 # distance to move above the table after grabbing the object
self.config["sleep_time_for_sensor_collect"] = 0.1
self.config['scan'] = dict()
self.config['scan']['pose_list'] = ['scan_left_close','scan_above_table','scan_right']
self.config['scan']['joint_speed'] = 45
self.config['grasp_speed'] = 20
normal_speed = 30
self.config['speed'] = dict()
self.config['speed']['stow'] = normal_speed
self.config['speed']['pre_grasp'] = normal_speed
self.config['speed']['grasp'] = 10
self.config['home_pose_name'] = 'above_table_pre_grasp'
self.config['grasp_nominal_direction'] = np.array([1, 0, 0]) # x forwards
self.config['grasp_to_ee'] = dict()
self.config["object_interaction"] = dict()
self.config["object_interaction"]["speed"] = 10
self.config["object_interaction"]["rotate_speed"] = 30
self.config["object_interaction"]["pickup_distance"] = 0.15
# self.config["object_interaction"]["drop_distance_above_grasp"] = 0.035 # good for shoes
self.config["object_interaction"]["drop_distance_above_grasp"] = 0.002 # good for mugs
self.config["object_interaction"]["drop_location"] = [0.65, 0, 0.5] # z coordinate is overwritten later
self.graspToIiwaLinkEE = spartanUtils.transformFromPose(
self.graspingParams['gripper_palm_to_ee'])
self.iiwaLinkEEToGraspFrame = self.graspToIiwaLinkEE.GetLinearInverse()
self.gripper_fingertip_to_iiwa_link_ee = spartanUtils.transformFromPose(
self.graspingParams['gripper_fingertip_to_ee'])
self.T_gripper_fingertip__iiwa_link_ee = self.gripper_fingertip_to_iiwa_link_ee.GetLinearInverse()
pos = [-0.15, 0, 0]
quat = [1, 0, 0, 0]
self.preGraspToGraspTransform = transformUtils.transformFromPose(pos, quat)
def setupSubscribers(self):
self.pointCloudSubscriber = rosUtils.SimpleSubscriber(self.pointCloudTopic, sensor_msgs.msg.PointCloud2)
self.rgbImageSubscriber = rosUtils.SimpleSubscriber(self.rgbImageTopic, sensor_msgs.msg.Image)
self.depthImageSubscriber = rosUtils.SimpleSubscriber(self.depthImageTopic, sensor_msgs.msg.Image)
self.camera_info_subscriber = rosUtils.SimpleSubscriber(self.camera_info_topic, sensor_msgs.msg.CameraInfo)
self.pointCloudSubscriber.start()
self.rgbImageSubscriber.start()
self.depthImageSubscriber.start()
self.camera_info_subscriber.start()
self.clicked_point_subscriber = rosUtils.SimpleSubscriber("/clicked_point", geometry_msgs.msg.PointStamped,
self.on_clicked_point)
self.clicked_point_subscriber.start()
self.ggcnn_subscriber = rosUtils.SimpleSubscriber('ggcnn/out/command', std_msgs.msg.Float32MultiArray)
def setupPublishers(self):
"""
Sets up some ROS publishers
"""
self.rviz_marker_publisher = rospy.Publisher("/spartan_grasp/visualization_marker",
visualization_msgs.msg.Marker, queue_size=1)
self.rviz_marker_array_publisher = rospy.Publisher("/grasp_supervisor/visualization_marker_array",
visualization_msgs.msg.MarkerArray, queue_size=1)
self.grasp_pointcloud_publisher = rospy.Publisher("/grasp_supervisor/points", sensor_msgs.msg.PointCloud2,
queue_size=1)
def setup_visualization(self):
self._vis_container = om.getOrCreateContainer("grasp supervisor")
def on_clicked_point(self, clicked_point_msg):
"""
Visualizes the clicked point in rviz
"""
print "received a /clicked_point message... visualizing"
pos = clicked_point_msg.point
x, y, z = pos.x, pos.y, pos.z
marker = visualization_msgs.msg.Marker()
marker.header.frame_id = "base"
marker.header.stamp = rospy.Time.now()
marker.ns = "clicked_point"
marker.id = 0
marker.type = visualization_msgs.msg.Marker.SPHERE
marker.action = visualization_msgs.msg.Marker.ADD
marker.pose.position.x = x
marker.pose.position.y = y
marker.pose.position.z = z
marker.pose.orientation.x = 0.0
marker.pose.orientation.y = 0.0
marker.pose.orientation.z = 0.0
marker.pose.orientation.w = 1.0
marker.scale.x = 0.03
marker.scale.y = 0.03
marker.scale.z = 0.03
marker.color.a = 1.0
marker.color.r = 1.0
marker.color.g = 0.0
marker.color.b = 0.0
# hack to get around director funny business
for i in xrange(0, 5):
self.rviz_marker_publisher.publish(marker)
rospy.sleep(0.02)
def get_clicked_point(self):
"""
        Returns the stored clicked point. If there is none it raises an error
rtype: geometry_msgs.Point
"""
lastMsg = self.clicked_point_subscriber.lastMsg
if lastMsg is None:
raise ValueError("No /clicked_point messages found.")
return lastMsg.point
def setupROSActions(self):
actionName = '/spartan_grasp/GenerateGraspsFromPointCloudList'
self.generate_grasps_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.GenerateGraspsFromPointCloudListAction)
actionName = '/spartan_grasp/Grasp3DLocation'
self.grasp_3D_location_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.Grasp3DLocationAction)
findBestBatchActionName = '/FindBestMatch'
self.find_best_match_client = actionlib.SimpleActionClient(findBestBatchActionName,
pdc_ros_msgs.msg.FindBestMatchAction)
poser_action_name = '/Poser'
self.poser_client = actionlib.SimpleActionClient(poser_action_name,
pdc_ros_msgs.msg.DeformableRegistrationAction)
category_manipulation_name = "/CategoryManipulation"
self.category_manip_client = actionlib.SimpleActionClient(category_manipulation_name, pdc_ros_msgs.msg.CategoryManipulationAction)
action_name = "/KeypointDetection"
self.keypoint_detection_client = actionlib.SimpleActionClient(action_name, pdc_ros_msgs.msg.KeypointDetectionAction)
action_name = "/PoseEstimation"
self.pose_estimation_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.EstimatePoseAction)
action_name = "/SaveRGBD"
self.save_RGBD_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.KeypointDetectionAction)
def setupTF(self):
if self.tfBuffer is None:
self.tfBuffer = tf2_ros.Buffer()
self.tfListener = tf2_ros.TransformListener(self.tfBuffer)
self.tfBroadcaster = tf2_ros.TransformBroadcaster()
def getDepthOpticalFrameToWorldTransform(self):
depth_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.depthOpticalFrameName,
rospy.Time(0))
return depth_optical_frame_to_world
def get_transform(self, from_name, to_name, ros_time=None):
if ros_time is None:
ros_time = rospy.Time(0)
transform_stamped_msg = self.tfBuffer.lookup_transform(to_name, from_name, ros_time)
# convert to vtkTransform
pos, quat = rosUtils.poseFromROSTransformMsg(transform_stamped_msg.transform)
return pos, quat
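    # Hedged example using frame names from this file's config: pose of the
    # end-effector frame expressed in the base frame, at the latest time:
    #
    #   pos, quat = self.get_transform("iiwa_link_ee", "base")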
def getRgbOpticalFrameToWorldTransform(self, time=None):
"""
:param time:
:type time:
:return: geometry_msgs/TransformStamped
:rtype:
"""
if time is None:
time = rospy.Time(0)
rgb_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.rgbOpticalFrameName,
time)
return rgb_optical_frame_to_world
def capturePointCloudAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
"""
Captures the current PointCloud2 from the sensor. Also records the pose of camera frame.
"""
        # sleep so transforms can update
msg = spartan_grasp_msgs.msg.PointCloudWithTransform()
msg.header.stamp = rospy.Time.now()
msg.camera_origin.x = cameraOrigin[0]
msg.camera_origin.y = cameraOrigin[1]
msg.camera_origin.z = cameraOrigin[2]
msg.point_cloud_to_base_transform = self.getDepthOpticalFrameToWorldTransform()
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
self.testData = msg # for debugging
return msg
def captureRgbdAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
        # sleep so transforms can update
msg = pdc_ros_msgs.msg.RGBDWithPose()
msg.header.stamp = rospy.Time.now()
msg.camera_pose = self.getRgbOpticalFrameToWorldTransform()
msg.rgb_image = self.rgbImageSubscriber.waitForNextMessage()
msg.depth_image = self.depthImageSubscriber.waitForNextMessage()
# maybe be careful about rostime here
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
msg.point_cloud_pose = self.getDepthOpticalFrameToWorldTransform()
return msg
def moveHome(self, speed=None):
rospy.loginfo("moving home")
if speed is None:
speed = self.graspingParams['speed']['nominal']
homePose = self.graspingParams[self.state.graspingLocation]['poses']['scan_above_table']
self.robotService.moveToJointPosition(homePose,
maxJointDegreesPerSecond=speed)
def getStowPose(self):
stow_location = self.state.stowLocation
params = self.graspingParams[stow_location]
return params['poses']['stow']
# scans to several positions
def collectSensorData(self, saveToBagFile=False, **kwargs):
"""
Collects PointCloud Messages, also RGB and Depth images.
Writes the result to two class variables
- self.pointCloudListMsg
- self.listOfRgbdWithPose
also returns these two values
"""
self.moveHome()
rospy.loginfo("collecting sensor data")
graspLocationData = self.graspingParams[self.state.graspingLocation]
pointCloudListMsg = spartan_grasp_msgs.msg.PointCloudList()
pointCloudListMsg.header.stamp = rospy.Time.now()
data = dict()
pose_list = graspLocationData['scan_pose_list']
listOfRgbdWithPoseMsg = []
for poseName in pose_list:
rospy.loginfo("moving to pose = " + poseName)
joint_positions = graspLocationData['poses'][poseName]
self.robotService.moveToJointPosition(joint_positions,
maxJointDegreesPerSecond=self.config['scan']['joint_speed'])
rospy.sleep(self.config["sleep_time_for_sensor_collect"])
pointCloudWithTransformMsg = self.capturePointCloudAndCameraTransform()
pointCloudListMsg.point_cloud_list.append(pointCloudWithTransformMsg)
data[poseName] = pointCloudWithTransformMsg
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg.append(rgbdWithPoseMsg)
self.sensorData = data
self.pointCloudListMsg = pointCloudListMsg
self.listOfRgbdWithPoseMsg = listOfRgbdWithPoseMsg
if saveToBagFile:
self.saveSensorDataToBagFile(pointCloudListMsg=pointCloudListMsg, **kwargs)
return pointCloudListMsg, listOfRgbdWithPoseMsg
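    # Usage sketch (assumes robot and camera drivers are running):
    #
    #   point_cloud_list_msg, rgbd_list = self.collectSensorData()
    #   # point_cloud_list_msg: spartan_grasp_msgs.msg.PointCloudList
    #   # rgbd_list: list of pdc_ros_msgs.msg.RGBDWithPose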
def findBestBatch(self):
"""
This function will:
- collect a small handful of RGBDWithPose msgs
- call the FindBestMatch service (a service of pdc-ros)
- return what was found from FindBestMatch
"""
self.moveHome()
_, listOfRgbdWithPoseMsg = self.collectSensorData()
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for find best match server")
self.find_best_match_client.wait_for_server()
goal = pdc_ros_msgs.msg.FindBestMatchGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting best match from server")
self.find_best_match_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for find best match result")
self.find_best_match_client.wait_for_result()
result = self.find_best_match_client.get_result()
rospy.loginfo("received best match result")
self.best_match_result = result
if result.match_found:
print "match found"
print "location:", result.best_match_location
else:
print "NO MATCH FOUND"
return result
def run_poser(self):
"""
This function will:
            - capture a single RGBDWithPose msg
            - call the Poser (DeformableRegistration) action of pdc-ros
            - store the result in self.poser_result and the cache
"""
# self.moveHome()
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg = [rgbdWithPoseMsg]
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for poser server")
self.poser_client.wait_for_server()
rospy.loginfo("connected to poser server")
goal = pdc_ros_msgs.msg.DeformableRegistrationGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting registration from poser")
self.poser_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for poser result")
self.poser_client.wait_for_result()
result = self.poser_client.get_result()
state = self.poser_client.get_state()
rospy.loginfo("received poser result")
print("result:\n", result)
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("Poser failed")
self.poser_result = result
self._cache['poser_result'] = result
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.taskRunner.callOnMain(self.visualize_poser_result)
def run_keypoint_detection(self, wait_for_result=True, move_to_stored_pose=True, clear_state=True):
"""
Runs keypoint detection using ManKey in pdc-ros. Note that this clears the cache
:return:
:rtype:
"""
if clear_state:
self._clear_cache()
self.state.clear()
if move_to_stored_pose:
CMT = CategoryManipulationType
            if MANIP_TYPE in [CMT.SHOE_ON_RACK, CMT.SHOE_ON_TABLE]:
                q = self._stored_poses_director['General']['center_back']
            else:  # basically all mugs
                q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=self.graspingParams['speed']['fast'])
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'] = []
self.state.cache['rgbd_with_pose_list'].append(rgbdWithPoseMsg)
# request via a ROS Action
rospy.loginfo("waiting for KeypointDetection server")
self.keypoint_detection_client.wait_for_server()
rospy.loginfo("connected to KeypointDetection server")
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
if EXPERIMENT_MODE:
goal.output_dir = "mankey_experiments/%s" %(spartanUtils.get_current_YYYY_MM_DD_hh_mm_ss())
rospy.loginfo("requesting action from KeypointDetection server")
self.keypoint_detection_client.send_goal(goal)
self.state.set_status("ABOVE_TABLE")
if wait_for_result:
self.wait_for_keypoint_detection_result()
def wait_for_keypoint_detection_result(self):
"""
        Wait for keypoint detection result, save it to cache
"""
rospy.loginfo("waiting for KeypointDetection result")
self.keypoint_detection_client.wait_for_result()
result = self.keypoint_detection_client.get_result()
state = self.keypoint_detection_client.get_state()
rospy.loginfo("received KeypointDetection result")
print "result:\n", result
self.keypoint_detection_result = result
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("KeypointDetection failed")
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.state._cache["keypoint_detection_result"] = result_dict
return result_dict
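    # Sketch of consuming the cached result dict built above:
    #
    #   res = self.state.cache['keypoint_detection_result']
    #   if res['succeeded']:
    #       print 'keypoint output written to', res['output_dir']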
def check_keypoint_detection_succeeded(self):
"""
Checks whether keypoint detection succeeded or not
:return:
:rtype:
"""
# you should have run keypoint detection before this
keypoint_detection_result = self.state.cache['keypoint_detection_result']
if keypoint_detection_result["state"] == GoalStatus.SUCCEEDED:
return True
else:
print("keypoint detection failed, ABORTING")
return False
def check_category_goal_estimation_succeeded(self):
"""
Returns a bool as to whether category goal estimation succeeded or not
:return:
:rtype:
"""
state = self.state.cache['category_manipulation_goal']['state']
if state == GoalStatus.SUCCEEDED:
return True
else:
print("category goal estimation failed, ABORTING")
return False
def estimate_mug_rack_pose(self):
"""
:return:
:rtype:
"""
# fusion_params_file = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/fusion/fusion_params.yaml")
#
#
# fusion_params = spartanUtils.getDictFromYamlFilename(fusion_params_file)
# bbox_min = np.array(fusion_params['left']['bbox_min'])
# bbox_min[2] += 0.05 # be conservative on where bottom of table is
# bbox_max = np.array(fusion_params['left']['bbox_max'])
bbox_min = np.array([0.07001, 0.49, 0.01026])
bbox_max = np.array([0.47195, 0.85201, 0.75])
rgbd_with_pose_list = []
# move to pose 1, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# move to pose 2, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack_2"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# convert to VTK poly data and crop
d = DebugData()
for msg in rgbd_with_pose_list:
pointcloud_numpy = DirectorROSVisualizer.numpy_from_pointcloud2_msg(msg.point_cloud)
pointcloud_vtk = vnp.getVtkPolyDataFromNumpyPoints(pointcloud_numpy)
T_world_pointcloud = ros_numpy.numpify(msg.point_cloud_pose.transform)
T_world_pointcloud_vtk = transformUtils.getTransformFromNumpy(T_world_pointcloud)
pointcloud_vtk = filterUtils.transformPolyData(pointcloud_vtk, T_world_pointcloud_vtk)
d.addPolyData(pointcloud_vtk)
pointcloud = d.getPolyData()
print "pointcloud.GetNumberOfPoints()", pointcloud.GetNumberOfPoints()
# crop
transform = vtk.vtkTransform()
bounds = np.zeros([2,3])
bounds[0,:] = bbox_min
bounds[1,:] = bbox_max
print "bounds", bounds
cropped_pointcloud = segmentation.cropToBounds(pointcloud, transform, bounds)
print "cropped_pointcloud.GetNumberOfPoints()", cropped_pointcloud.GetNumberOfPoints()
# visualize it
def vis_function():
print "visualizing pointcloud"
vis.showPolyData(pointcloud, "pointcloud")
vis.showPolyData(cropped_pointcloud, "Mug rack pointcloud")
self.mug_rack_pointcloud = cropped_pointcloud
# not working for some reason
print "visualizing"
self.taskRunner.callOnMain(vis_function)
return
rgbd_with_pose = pdc_ros_msgs.msg.RGBDWithPose()
# N x 3
cropped_pointcloud_numpy = vnp.getNumpyFromVtk(cropped_pointcloud)
print "cropped_pointcloud_numpy.shape", cropped_pointcloud_numpy.shape
# save numpy to file
save_file = "/home/manuelli/sandbox/spartan/pointcloud.npy"
np.save(save_file, cropped_pointcloud_numpy)
return
# it's already in world frame
rgbd_with_pose.point_cloud = DirectorROSVisualizer.pointcloud2_msg_from_numpy(cropped_pointcloud_numpy)
# convert it back to ROS msg
goal = pdc_ros_msgs.msg.EstimatePoseGoal()
goal.rgbd_with_pose_list.append(rgbd_with_pose)
T_world_rack_vtk = self._category_manip.mug_rack_vis_obj.getChildFrame().transform
T_world_rack = transformUtils.getNumpyFromTransform(T_world_rack_vtk)
goal.T_init = ros_numpy.msgify(geometry_msgs.Pose, T_world_rack)
# send out service call
self.pose_estimation_client.wait_for_server()
self.pose_estimation_client.send_goal(goal)
# wait for result
self.pose_estimation_client.wait_for_result()
result = self.pose_estimation_client.get_result()
T_world_rack_estimated = ros_numpy.numpify(result.T_world_model)
T_world_rack_estimated_vtk = transformUtils.getTransformFromNumpy(T_world_rack_estimated)
self._category_manip.mug_rack_vis_obj.getChildFrame().copyFrame(T_world_rack_estimated_vtk)
def run_category_manipulation_goal_estimation(self, wait_for_result=True, capture_rgbd=True):
"""
Calls the CategoryManipulation service of pdc-ros
which is provided by category_manip_server.py.
Uses the keypoint detection result from either
`run_poser` or `run_keypoint_detection`
:return: bool
:rtype:
"""
if not self.check_keypoint_detection_succeeded():
return False
keypoint_detection_result = self.state.cache['keypoint_detection_result']
# don't specify poser output dir for now
goal = pdc_ros_msgs.msg.CategoryManipulationGoal()
goal.output_dir = keypoint_detection_result['output_dir']
goal.keypoint_detection_type = keypoint_detection_result['type']
if capture_rgbd:
self.moveHome()
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
        if 'rgbd_with_pose_list' in self.state.cache:
            goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
if MANIP_TYPE == CategoryManipulationType.SHOE_ON_RACK:
print("applying T_adjust")
print("self._shoe_manipulation_counter", self._shoe_manipulation_counter)
goal.apply_T_adjust = True
pos = np.array([self.graspingParams["shoe_offset"], 0, 0]) * self._shoe_manipulation_counter
quat = [1,0,0,0]
T_adjust_vtk = transformUtils.transformFromPose(pos, quat)
T_adjust = transformUtils.getNumpyFromTransform(T_adjust_vtk)
goal.T_adjust = ros_numpy.msgify(geometry_msgs.msg.Pose, T_adjust)
else:
            goal.apply_T_adjust = False
rospy.loginfo("waiting for CategoryManip server")
self.category_manip_client.wait_for_server()
rospy.loginfo("connected to CategoryManip server")
self.category_manip_client.send_goal(goal)
if wait_for_result:
self.wait_for_category_manipulation_goal_result()
return True
def wait_for_category_manipulation_goal_result(self):
"""
Waits for category manipulation goal result
"""
print("waiting for category manipulation result")
self.category_manip_client.wait_for_result()
result = self.category_manip_client.get_result()
state = self.category_manip_client.get_state()
T_goal_obs = ros_numpy.numpify(result.T_goal_obs)
print "T_goal_obs:\n", T_goal_obs
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
print transformUtils.poseFromTransform(T_goal_obs_vtk)
self.state.cache['category_manipulation_goal'] = dict()
self.state.cache['category_manipulation_goal']['result'] = result
self.state.cache['category_manipulation_goal']["T_goal_obs"] = T_goal_obs_vtk
self.state.cache['category_manipulation_goal']['state'] = state
self.state.cache['category_manipulation_goal']["type"] = CategoryManipulationType.from_string(result.category_manipulation_type)
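    # Sketch of reading the cached goal back out (keys as stored above):
    #
    #   goal_cache = self.state.cache['category_manipulation_goal']
    #   T_goal_obs_vtk = goal_cache['T_goal_obs']   # vtkTransform
    #   succeeded = (goal_cache['state'] == GoalStatus.SUCCEEDED)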
def run_mug_shelf_3D_pipeline(self):
"""
Runs entire pipeline for mug shelf 3D
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
super_fast_speed = self.graspingParams['speed']['fast']
# q = self._stored_poses_director["General"]["home"]
# q = self._stored_poses_director["mug"]["image_capture_for_mug_shelf"]
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
# run keypoint detection
# move to center back to capture another RGBD image
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_shelf_manipulation()
        if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_mug_shelf()
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_mug_on_rack_pipeline(self, side_view=False):
"""
        Runs the entire pipeline for mug on rack
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
q = self._stored_poses_director["General"]["home"]
if side_view:
print "\nusing side view\n"
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
# move to center back to capture another RGBD image
q = self._stored_poses_director["General"]["center_back"]
if side_view:
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_on_rack_manipulation()
        if not code:
self.state.set_status("FAILED")
return False
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_shoe_on_rack_pipeline(self):
"""
        Runs the entire pipeline for shoe on rack
:return:
:rtype:
"""
if EXPERIMENT_MODE:
self._shoe_manipulation_counter = 0 # for testing
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
# q = self._stored_poses_director["General"]["center_back"]
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_shoe_rack_manipulation()
if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_shoe_rack()
if EXPERIMENT_MODE:
print "\n\n", self.state.cache['keypoint_detection_result']['output_dir']
def run_manipulate_object(self, debug=False):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
if debug:
self._object_manipulation = ObjectManipulation()
self._object_manipulation.assign_defaults()
self._object_manipulation.compute_transforms()
return
self.moveHome()
grasp_found, grasp_data = self.request_spartan_grasp(clear_state=False)
if not grasp_found:
print "no grasp found, returning\n"
return False
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True)
print "object_in_gripper:", object_in_gripper
T_goal_obs = self.state.cache['category_manipulation_T_goal_obs']
T_W_G = self.state.cache['gripper_frame_at_grasp']
self._object_manipulation = ObjectManipulation(T_goal_object=T_goal_obs, T_W_G=T_W_G)
self._object_manipulation.grasp_data = self.state.grasp_data
self._object_manipulation.compute_transforms()
self.taskRunner.callOnMain(self._object_manipulation.visualize)
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# place the object
grasp_data_place = self._object_manipulation.get_place_grasp_data()
self.execute_place(grasp_data_place)
# open the gripper and back away
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
self.gripperDriver.send_open_gripper_set_distance_from_current()
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# move home
self.moveHome()
def run_shoe_rack_manipulation(self, debug=False, push_in_distance=0.00):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
print("\n\n--- Running Shoe Manipulation-------\n\n")
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
        # check that we really are doing shoe on rack
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.SHOE_ON_RACK
speed = self.graspingParams['speed']['fast']
self.moveHome(speed=speed)
result = self.state.cache['category_manipulation_goal']['result']
T_W_fingertip = ros_numpy.numpify(result.T_world_gripper_fingertip)
T_W_fingertip_vtk = transformUtils.getTransformFromNumpy(T_W_fingertip)
grasp_data = GraspData.from_gripper_fingertip_frame(T_W_fingertip)
        grasp_data.gripper.params["hand_inner_diameter"] = result.gripper_width
        grasp_data.gripper.params["hand_inner_diameter"] = 0.07  # hardcoded override of the planner's gripper width
self.state.grasp_data = grasp_data
# rotate the grasp to align with nominal
params = self.getParamsForCurrentLocation()
grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
def vis_function():
vis.updateFrame(T_W_fingertip_vtk, "gripper fingertip frame", scale=0.15, parent=self._vis_container)
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", scale=0.15, parent=self._vis_container)
self.visualize_grasp(grasp_data)
self.taskRunner.callOnMain(vis_function)
# execute the grasp
force_threshold_magnitude = 30
object_in_gripper = self.execute_grasp(grasp_data, close_gripper=True, use_cartesian_plan=True, force_threshold_magnitude=force_threshold_magnitude, push_in_distance=0.04, ee_speed_m_s=0.1)
if not object_in_gripper:
print("gr
import datetime
import json
from dateutil import parser
import mock
from python_http_client.exceptions import ForbiddenError
from rdr_service import clock, config
from rdr_service.api_util import open_cloud_file
from rdr_service.clock import FakeClock
from rdr_service.dao.database_utils import format_datetime
from rdr_service.dao.genomics_dao import GenomicGcDataFileDao, GenomicGCValidationMetricsDao, GenomicIncidentDao, \
GenomicSetMemberDao, UserEventMetricsDao, GenomicJobRunDao, GenomicResultWithdrawalsDao, \
GenomicMemberReportStateDao, GenomicAppointmentEventMetricsDao, GenomicAppointmentEventDao, GenomicResultViewedDao, \
GenomicInformingLoopDao, GenomicAppointmentEventNotifiedDao, GenomicDefaultBaseDao
from rdr_service.dao.message_broker_dao import MessageBrokenEventDataDao
from rdr_service.genomic_enums import GenomicIncidentCode, GenomicJob, GenomicWorkflowState, GenomicSubProcessResult, \
GenomicSubProcessStatus, GenomicManifestTypes, GenomicQcStatus, GenomicReportState
from rdr_service.genomic.genomic_job_components import GenomicFileIngester
from rdr_service.genomic.genomic_job_controller import GenomicJobController
from rdr_service.model.genomics import GenomicGcDataFile, GenomicIncident, GenomicSetMember, GenomicGCValidationMetrics,\
GenomicGCROutreachEscalationNotified
from rdr_service.offline.genomics import genomic_pipeline, genomic_cvl_pipeline
from rdr_service.participant_enums import WithdrawalStatus
from tests import test_data
from tests.genomics_tests.test_genomic_utils import create_ingestion_test_file
from tests.helpers.unittest_base import BaseTestCase
class GenomicJobControllerTest(BaseTestCase):
def setUp(self):
super(GenomicJobControllerTest, self).setUp()
self.data_file_dao = GenomicGcDataFileDao()
self.event_data_dao = MessageBrokenEventDataDao()
self.incident_dao = GenomicIncidentDao()
self.member_dao = GenomicSetMemberDao()
self.metrics_dao = GenomicGCValidationMetricsDao()
self.user_event_metrics_dao = UserEventMetricsDao()
self.job_run_dao = GenomicJobRunDao()
self.report_state_dao = GenomicMemberReportStateDao()
self.appointment_event_dao = GenomicAppointmentEventDao()
self.appointment_metrics_dao = GenomicAppointmentEventMetricsDao()
def test_incident_with_long_message(self):
"""Make sure the length of incident messages doesn't cause issues when recording them"""
incident_message = "1" * (GenomicIncident.message.type.length + 20)
mock_slack_handler = mock.MagicMock()
job_controller = GenomicJobController(job_id=1)
job_controller.genomic_alert_slack = mock_slack_handler
job_controller.create_incident(message=incident_message, slack=True)
# Double check that the incident was saved successfully, with part of the message
incident: GenomicIncident = self.session.query(GenomicIncident).one()
self.assertTrue(incident_message.startswith(incident.message))
# Make sure Slack received the full message
mock_slack_handler.send_message_to_webhook.assert_called_with(
message_data={
'text': incident_message
}
)
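        # A minimal sketch of the truncation being exercised above (assuming
        # the stored message is capped at the column length):
        #
        #   max_len = GenomicIncident.message.type.length
        #   self.assertEqual(incident.message, incident_message[:max_len])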
def test_gvcf_files_ingestion(self):
job_controller = GenomicJobController(job_id=38)
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
file_path_md5 = "Wgs_sample_raw_data/SS_VCF_research/" \
"BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz.md5sum"
full_path = f'{bucket_name}/{file_path}'
full_path_md5 = f'{bucket_name}/{file_path_md5}'
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
job_controller.ingest_data_files_into_gc_metrics(file_path_md5, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfMd5Path)
self.assertEqual(metrics.gvcfMd5Path, full_path_md5)
job_controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfPath)
self.assertEqual(metrics.gvcfPath, full_path)
def test_gvcf_files_ingestion_create_incident(self):
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="111111111",
sampleId="222222222222",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName=bucket_name,
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
with GenomicJobController(GenomicJob.INGEST_DATA_FILES) as controller:
controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
incident = self.incident_dao.get(1)
self.assertIsNotNone(incident)
self.assertEqual(incident.code, GenomicIncidentCode.UNABLE_TO_FIND_METRIC.name)
self.assertEqual(incident.data_file_path, file_path)
self.assertEqual(incident.message, 'INGEST_DATA_FILES: Cannot find '
'genomics metric record for sample id: '
'21042005280')
def test_accession_data_files(self):
test_bucket_baylor = "fake-data-bucket-baylor"
test_idat_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01_Grn.idat"
test_vcf_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01.vcf.gz"
test_cram_file = "fake-data-bucket-baylor/Wgs_sample_raw_data/" \
"CRAMs_CRAIs/BCM_A100134256_21063006771_SIA0017196_1.cram"
test_files = [test_idat_file, test_vcf_file, test_cram_file]
test_time = datetime.datetime(2021, 7, 9, 14, 1, 1)
# run job controller method on each file
with clock.FakeClock(test_time):
for file_path in test_files:
with GenomicJobController(GenomicJob.ACCESSION_DATA_FILES) as controller:
controller.accession_data_files(file_path, test_bucket_baylor)
inserted_files = self.data_file_dao.get_all()
# idat
expected_idat = GenomicGcDataFile(
id=1,
created=test_time,
modified=test_time,
file_path=test_idat_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01_Grn.idat',
file_type='Grn.idat',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# vcf
expected_vcf = GenomicGcDataFile(
id=2,
created=test_time,
modified=test_time,
file_path=test_vcf_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01.vcf.gz',
file_type='vcf.gz',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# cram
expected_cram = GenomicGcDataFile(
id=3,
created=test_time,
modified=test_time,
file_path=test_cram_file,
gc_site_id='bcm',
bucket_name='fake-data-bucket-baylor',
file_prefix='Wgs_sample_raw_data/CRAMs_CRAIs',
file_name='BCM_A100134256_21063006771_SIA0017196_1.cram',
file_type='cram',
identifier_type='sample_id',
identifier_value='21063006771',
ignore_flag=0,
)
# obj mapping
expected_objs = {
0: expected_idat,
1: expected_vcf,
2: expected_cram
}
# verify test objects match expectations
for i in range(3):
self.assertEqual(expected_objs[i].bucket_name, inserted_files[i].bucket_name)
self.assertEqual(expected_objs[i].created, inserted_files[i].created)
self.assertEqual(expected_objs[i].file_name, inserted_files[i].file_name)
self.assertEqual(expected_objs[i].file_path, inserted_files[i].file_path)
self.assertEqual(expected_objs[i].file_prefix, inserted_files[i].file_prefix)
self.assertEqual(expected_objs[i].file_type, inserted_files[i].file_type)
self.assertEqual(expected_objs[i].gc_site_id, inserted_files[i].gc_site_id)
self.assertEqual(expected_objs[i].id, inserted_files[i].id)
self.assertEqual(expected_objs[i].identifier_type, inserted_files[i].identifier_type)
self.assertEqual(expected_objs[i].identifier_value, inserted_files[i].identifier_value)
self.assertEqual(expected_objs[i].ignore_flag, inserted_files[i].ignore_flag)
self.assertEqual(expected_objs[i].metadata, inserted_files[i].metadata)
self.assertEqual(expected_objs[i].modified, inserted_files[i].modified)
def test_updating_members_blocklists(self):
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
ids_should_be_updated = []
# for just created and wf state query and MATCHES criteria
for i in range(4):
ids_should_be_updated.append(
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
                    genomeType='test_investigation_one' if i & 2 != 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='Y' if i & 2 == 0 else 'N'
).id
)
# for just created and wf state query and DOES NOT MATCH criteria
for i in range(2):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='aou_array',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
# current config json in base_config.json
created_members = self.member_dao.get_all()
blocklisted = list(filter(lambda x: x.blockResults == 1 or x.blockResearch == 1, created_members))
        self.assertEqual(sorted(ids_should_be_updated), sorted([obj.id for obj in blocklisted]))
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should NOT be RESEARCH/RESULTS blocked
self.assertTrue(all(
obj.blockResearch == 0 and obj.blockResearchReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# clear current set member records
with self.member_dao.session() as session:
session.query(GenomicSetMember).delete()
run_result = self.job_run_dao.get(1)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
# for modified data query and MATCHES criteria
for i in range(4):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
                genomeType='test_investigation_one' if i & 2 != 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW1,
ai_an='Y' if i & 2 == 0 else 'N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
modified_members = self.member_dao.get_all()
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
run_result = self.job_run_dao.get(2)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
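# Ingest a user-events metrics CSV and verify each participant's file rows land in user_event_metrics under the job run.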
def test_ingest_user_metrics_file(self):
test_file = 'Genomic-Metrics-File-User-Events-Test.csv'
bucket_name = 'test_bucket'
sub_folder = 'user_events'
pids = []
file_ingester = GenomicFileIngester()
for _ in range(2):
pid = self.data_generator.create_database_participant()
pids.append(pid.participantId)
test_metrics_file = create_ingestion_test_file(
test_file,
bucket_name,
sub_folder)
test_file_path = f'{bucket_name}/{sub_folder}/{test_metrics_file}'
with open_cloud_file(test_file_path) as csv_file:
metrics_to_ingest = file_ingester._read_data_to_ingest(csv_file)
with GenomicJobController(GenomicJob.METRICS_FILE_INGEST) as controller:
controller.ingest_metrics_file(
metric_type='user_events',
file_path=test_file_path,
)
job_run_id = controller.job_run.id
metrics = self.user_event_metrics_dao.get_all()
for pid in pids:
file_metrics = list(filter(lambda x: int(x['participant_id'].split('P')[-1]) == pid, metrics_to_ingest['rows']))
participant_ingested_metrics = list(filter(lambda x: x.participant_id == pid, metrics))
self.assertEqual(len(file_metrics), len(participant_ingested_metrics))
self.assertTrue(all(obj.run_id == job_run_id for obj in participant_ingested_metrics))
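# RECONCILE_PDR_DATA should enqueue one rebuild-records cloud task per genomic table with new or modified rows.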
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_reconcile_pdr_data(self, mock_cloud_task):
# init new job run in __enter__
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
cloud_task_endpoint = 'rebuild_genomic_table_records_task'
first_run = self.job_run_dao.get_all()
self.assertEqual(mock_cloud_task.call_count, 1)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 1)
self.assertEqual(call_args[0].args[0]['table'], self.job_run_dao.model_type.__tablename__)
self.assertIsInstance(call_args[0].args[0]['ids'], list)
self.assertEqual(call_args[0].args[0]['ids'], [obj.id for obj in first_run])
self.assertEqual(call_args[0].args[1], cloud_task_endpoint)
participant = self.data_generator.create_database_participant()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
plus_ten = clock.CLOCK.now() + datetime.timedelta(minutes=10)
plus_ten = plus_ten.replace(microsecond=0)
with FakeClock(plus_ten):
for i in range(2):
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=first_run[0].id,
startTime=clock.CLOCK.now(),
filePath=f'test_file_path_{i}',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
manifest = self.data_generator.create_database_genomic_manifest_file(
manifestTypeId=2,
filePath=f'test_file_path_{i}'
)
self.data_generator.create_database_genomic_manifest_feedback(
inputManifestFileId=manifest.id,
feedbackRecordCount=2
)
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=participant.participantId,
event_name='test_event',
run_id=1,
)
self.data_generator.create_database_genomic_informing_loop(
message_record_id=1,
event_type='informing_loop_decision',
module_type='gem',
participant_id=participant.participantId,
decision_value='maybe_later',
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_database_genomic_cvl_past_due(
cvl_site_id='co',
email_notification_sent=0,
sample_id='sample_test',
results_type='hdr',
genomic_set_member_id=gen_member.id
)
self.data_generator.create_database_genomic_appointment(
message_record_id=i,
appointment_id=i,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=participant.participantId,
event_authored_time=clock.CLOCK.now(),
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_member_report_state(
genomic_set_member_id=gen_member.id,
participant_id=participant.participantId,
module='gem',
genomic_report_state=GenomicReportState.GEM_RPT_READY,
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_genomic_result_viewed(
participant_id=participant.participantId,
event_type='result_viewed',
event_authored_time=clock.CLOCK.now(),
module_type='gem',
sample_id=gen_member.sampleId
)
# gets new records that were created with last job run from above
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
affected_tables = [
'genomic_set',
'genomic_set_member',
'genomic_job_run',
'genomic_file_processed',
'genomic_gc_validation_metrics',
'genomic_manifest_file',
'genomic_manifest_feedback',
'genomic_informing_loop',
'genomic_cvl_results_past_due',
'user_event_metrics',
'genomic_member_report_state',
'genomic_result_viewed',
'genomic_appointment_event'
]
num_calls = len(affected_tables) + 1
self.assertEqual(mock_cloud_task.call_count, num_calls)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), num_calls)
mock_tables = {obj[0][0]['table'] for obj in call_args}
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertEqual(mock_tables, set(affected_tables))
self.assertTrue(all(obj == cloud_task_endpoint for obj in mock_endpoint))
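# RETRY_MANIFEST_INGESTIONS should only re-enqueue AW1/AW2 ingestion tasks when raw records have no ingested counterparts (deltas).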
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_retry_manifest_ingestions_if_deltas(self, mock_cloud_task):
bucket_name = "test-bucket"
aw1_file_name = "AW1_wgs_sample_manifests/RDR_AoU_SEQ_PKG-2104-026571.csv"
aw1_manifest_path = f"{bucket_name}/{aw1_file_name}"
aw2_file_name = "AW2_wgs_data_manifests/RDR_AoU_SEQ_DataManifest_04092021.csv"
aw2_manifest_path = f"{bucket_name}/{aw2_file_name}"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
# Create AW1 job_run
aw1_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# Create AW2 job_run
aw2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_INGESTION,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# should have no data
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(3)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
self.assertFalse(mock_cloud_task.call_count)
# Create genomic_aw1_raw record
self.data_generator.create_database_genomic_aw1_raw(
file_path=aw1_manifest_path,
package_id="PKG-2104-026571",
biobank_id="A10001",
)
# Create genomic_aw2_raw record
self.data_generator.create_database_genomic_aw2_raw(
file_path=aw2_manifest_path,
biobank_id="A10001",
sample_id="100001",
biobankidsampleid="A10001_100001",
)
# Create AW1 genomic_manifest_file record
aw1_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW1,
filePath=aw1_manifest_path,
fileName=aw1_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW2 genomic_manifest_file record
aw2_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW2,
filePath=aw2_manifest_path,
fileName=aw2_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW1 file_processed
aw1_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw1_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw1_manifest_file.id,
filePath=f"/{aw1_manifest_path}",
bucketName=bucket_name,
fileName=aw1_file_name,
)
# Create AW2 file_processed
aw2_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw2_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw2_manifest_file.id,
filePath=f"/{aw2_manifest_path}",
bucketName=bucket_name,
fileName=aw2_file_name,
)
# genomic_set_member for AW1
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1,
aw1FileProcessedId=aw1_file_processed.id
)
# genomic_gc_validation_metrics for AW1
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=aw2_file_processed.id
)
# one AW1/AW2 with no deltas
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(4)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
self.assertFalse(mock_cloud_task.call_count)
# empty tables resulting in deltas and cloud task calls
with self.member_dao.session() as session:
session.query(GenomicGCValidationMetrics).delete()
session.query(GenomicSetMember).delete()
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(5)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.SUCCESS)
# one AW1/AW2 with deltas
self.assertEqual(mock_cloud_task.call_count, 2)
self.assertTrue(mock_cloud_task.call_count)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 2)
cloud_task_endpoint = ['ingest_aw1_manifest_task', 'ingest_aw2_manifest_task']
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertTrue(all(obj in cloud_task_endpoint for obj in mock_endpoint))
mock_buckets = {obj[0][0]['bucket_name'] for obj in call_args}
self.assertEqual(len(mock_buckets), 1)
self.assertEqual(list(mock_buckets)[0], bucket_name)
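# CALCULATE_INFORMING_LOOP_READY sets informingLoopReadyFlag in batches capped by the CALCULATE_READY_FLAG_LIMIT config value.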
def test_calculate_informing_loop_ready_flags(self):
num_participants = 4
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
for num in range(num_participants):
plus_num = clock.CLOCK.now() + datetime.timedelta(minutes=num)
plus_num = plus_num.replace(microsecond=0)
with FakeClock(plus_num):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
stored_sample = self.data_generator.create_database_biobank_stored_sample(
biobankId=summary.biobankId,
biobankOrderIdentifier=self.fake.pyint()
)
collection_site = self.data_generator.create_database_site(
siteType='Clinic'
)
order = self.data_generator.create_database_biobank_order(
collectedSiteId=collection_site.siteId,
participantId=summary.participantId,
finalizedTime=plus_num
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="1",
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="2",
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
qcStatus=GenomicQcStatus.PASS,
gcManifestSampleSource='Whole Blood',
collectionTubeId=stored_sample.biobankStoredSampleId
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=member.id,
sexConcordance='True',
drcFpConcordance='Pass',
drcSexConcordance='Pass',
processingStatus='Pass'
)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 0 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is None for obj in current_set_members))
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
# no config object, controller method should return
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
calculation_limit = 2
config.override_setting(config.CALCULATE_READY_FLAG_LIMIT, [calculation_limit])
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(any(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(any(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
current_loops_set = [obj for obj in current_set_members if obj.informingLoopReadyFlag == 1
and obj.informingLoopReadyFlagModified is not None]
self.assertEqual(len(current_loops_set), calculation_limit)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants // 2)
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), 0)
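# RESULTS_PIPELINE_WITHDRAWALS should record withdrawn participants and send one notification email per module (GEM and HEALTH).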
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_getting_results_withdrawn(self, email_mock):
num_participants = 4
result_withdrawal_dao = GenomicResultWithdrawalsDao()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids = []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY,
gemA1ManifestJobRunId=gen_job_run.id if num % 2 == 0 else None
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
cvlW1ilHdrJobRunId=gen_job_run.id
)
pids.append(summary.participantId)
config.override_setting(config.RDR_GENOMICS_NOTIFICATION_EMAIL, '[email protected]')
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# mock checks should be two => 1 GEM 1 HEALTH
self.assertEqual(email_mock.call_count, 2)
call_args = email_mock.call_args_list
self.assertTrue(any('GEM' in call.args[0].subject for call in call_args))
self.assertTrue(any('HEALTH' in call.args[0].subject for call in call_args))
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
all_withdrawal_records = result_withdrawal_dao.get_all()
self.assertTrue(len(all_withdrawal_records) == len(pids))
self.assertTrue(all(obj.participant_id in pids for obj in all_withdrawal_records))
array_results = list(filter(lambda x: x.array_results == 1, all_withdrawal_records))
# should only be 2
self.assertEqual(len(array_results), 2)
cvl_results = list(filter(lambda x: x.cvl_results == 1, all_withdrawal_records))
# should be 4 for num of participants
self.assertEqual(len(cvl_results), num_participants)
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# mock checks should still be two on account of no records
self.assertEqual(email_mock.call_count, 2)
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
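# GEM_RESULT_REPORTS should create a GEM report state only for members with an A2 manifest run, and skip members already inserted.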
def test_gem_results_to_report_state(self):
num_participants = 8
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gem_a2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.GEM_A2_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids_to_update, member_ids = [], []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY
)
if num % 2 == 0:
member_ids.append(member.id)
pids_to_update.append(summary.participantId)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 2)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
current_members = self.member_dao.get_all()
# update 4 of the members so they qualify and are returned on the next run
for member in current_members:
if member.participantId in pids_to_update:
member.gemA2ManifestJobRunId = gem_a2_job_run.id
member.genomicWorkflowState = GenomicWorkflowState.GEM_RPT_READY
self.member_dao.update(member)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 3)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
current_gem_report_states = self.report_state_dao.get_all()
self.assertEqual(len(current_gem_report_states), len(pids_to_update))
self.assertTrue(all(obj.event_type == 'result_ready' for obj in current_gem_report_states))
self.assertTrue(all(obj.event_authored_time is not None for obj in current_gem_report_states))
self.assertTrue(all(obj.module == 'gem' for obj in current_gem_report_states))
self.assertTrue(
all(obj.genomic_report_state == GenomicReportState.GEM_RPT_READY for obj in current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_report_state_str == GenomicReportState.GEM_RPT_READY.name for obj in
current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_set_member_id in member_ids for obj in
current_gem_report_states)
)
# the 4 report states already inserted should not be returned again
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 4)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[2]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
self.clear_table_after_test('genomic_member_report_state')
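# Reconcile informing-loop responses recorded via user event metrics against message-broker informing-loop records.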
def test_reconcile_informing_loop(self):
event_dao = UserEventMetricsDao()
event_dao.truncate() # for test suite
il_dao = GenomicInformingLoopDao()
for pid in range(8):
self.data_generator.create_database_participant(participantId=1 + pid, biobankId=1 + pid)
# Set up initial job run ID
self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_FILE_INGEST,
startTime=clock.CLOCK.now()
)
# create genomic set
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
# insert set members
for b in ["aou_array", "aou_wgs"]:
for i in range(1, 9):
self.data_generator.create_database_genomic_set_member(
participantId=i,
genomicSetId=1,
biobankId=i,
collectionTubeId=100 + i,
sampleId=10 + i,
genomeType=b,
)
# Set up ingested metrics data
events = ['gem.informing_loop.started',
'gem.informing_loop.screen8_no',
'gem.informing_loop.screen8_yes',
'hdr.informing_loop.started',
'gem.informing_loop.screen3',
'pgx.informing_loop.screen8_no',
'hdr.informing_loop.screen10_no']
for p in range(4):
for i in range(len(events)):
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=p + 1,
created_at=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i),
event_name=events[i],
run_id=1,
ignore_flag=0,
)
# Set up informing loop from message broker records
decisions = [None, 'no', 'yes']
for p in range(3):
for i in range(2):
self.data_generator.create_database_genomic_informing_loop(
message_record_id=i,
event_type='informing_loop_started' if i == 0 else 'informing_loop_decision',
module_type='gem',
participant_id=p + 1,
decision_value=decisions[i],
sample_id=100 + p,
event_authored_time=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i)
)
# Test for no message but yes user event
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=6,
created_at=datetime.datetime(2021, 12, 29, 00),
event_name='gem.informing_loop.screen8_yes',
run_id=1,
ignore_flag=0,
)
# Run reconcile job
genomic_pipeline.reconcile_informing_loop_responses()
# Test mismatched GEM data ingested correctly
pid_list = [1,
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
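# Each model below follows the Tea SDK pattern: to_map() serializes only the
# fields that are set into a request/response dict, and from_map() hydrates
# the model from such a dict.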
class CreateCertificateRequest(TeaModel):
def __init__(self, domain=None, certificate=None, private_key=None, certificate_name=None, instance_id=None):
self.domain = domain # type: str
self.certificate = certificate # type: str
self.private_key = private_key # type: str
self.certificate_name = certificate_name # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate is not None:
result['Certificate'] = self.certificate
if self.private_key is not None:
result['PrivateKey'] = self.private_key
if self.certificate_name is not None:
result['CertificateName'] = self.certificate_name
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Certificate') is not None:
self.certificate = m.get('Certificate')
if m.get('PrivateKey') is not None:
self.private_key = m.get('PrivateKey')
if m.get('CertificateName') is not None:
self.certificate_name = m.get('CertificateName')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
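# Round-trip sketch (illustrative values; any field left unset is omitted from the map):
# req = CreateCertificateRequest(domain='example.com', certificate='...', private_key='...')
# payload = req.to_map() # {'Domain': 'example.com', 'Certificate': '...', 'PrivateKey': '...'}
# clone = CreateCertificateRequest().from_map(payload)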
class CreateCertificateResponseBody(TeaModel):
def __init__(self, request_id=None, certificate_id=None):
self.request_id = request_id # type: str
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class CreateCertificateResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateCertificateResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateCertificateResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateCertificateResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateCertificateByCertificateIdRequest(TeaModel):
def __init__(self, domain=None, certificate_id=None, instance_id=None):
self.domain = domain # type: str
self.certificate_id = certificate_id # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateByCertificateIdRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class CreateCertificateByCertificateIdResponseBody(TeaModel):
def __init__(self, request_id=None, certificate_id=None):
self.request_id = request_id # type: str
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateByCertificateIdResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class CreateCertificateByCertificateIdResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateCertificateByCertificateIdResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateCertificateByCertificateIdResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateCertificateByCertificateIdResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, source_ips=None, is_access_product=None,
access_header_mode=None, access_headers=None, load_balancing=None, log_headers=None, http_port=None, https_port=None,
http_2port=None, http_to_user_ip=None, https_redirect=None, cluster_type=None, resource_group_id=None,
connection_time=None, read_time=None, write_time=None, access_type=None, cloud_native_instances=None,
ip_follow_status=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.source_ips = source_ips # type: str
self.is_access_product = is_access_product # type: int
self.access_header_mode = access_header_mode # type: int
self.access_headers = access_headers # type: str
self.load_balancing = load_balancing # type: int
self.log_headers = log_headers # type: str
self.http_port = http_port # type: str
self.https_port = https_port # type: str
self.http_2port = http_2port # type: str
self.http_to_user_ip = http_to_user_ip # type: int
self.https_redirect = https_redirect # type: int
self.cluster_type = cluster_type # type: int
self.resource_group_id = resource_group_id # type: str
self.connection_time = connection_time # type: int
self.read_time = read_time # type: int
self.write_time = write_time # type: int
self.access_type = access_type # type: str
self.cloud_native_instances = cloud_native_instances # type: str
self.ip_follow_status = ip_follow_status # type: int
def validate(self):
pass
def to_map(self):
_map = super(CreateDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.source_ips is not None:
result['SourceIps'] = self.source_ips
if self.is_access_product is not None:
result['IsAccessProduct'] = self.is_access_product
if self.access_header_mode is not None:
result['AccessHeaderMode'] = self.access_header_mode
if self.access_headers is not None:
result['AccessHeaders'] = self.access_headers
if self.load_balancing is not None:
result['LoadBalancing'] = self.load_balancing
if self.log_headers is not None:
result['LogHeaders'] = self.log_headers
if self.http_port is not None:
result['HttpPort'] = self.http_port
if self.https_port is not None:
result['HttpsPort'] = self.https_port
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
if self.http_to_user_ip is not None:
result['HttpToUserIp'] = self.http_to_user_ip
if self.https_redirect is not None:
result['HttpsRedirect'] = self.https_redirect
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.connection_time is not None:
result['ConnectionTime'] = self.connection_time
if self.read_time is not None:
result['ReadTime'] = self.read_time
if self.write_time is not None:
result['WriteTime'] = self.write_time
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.cloud_native_instances is not None:
result['CloudNativeInstances'] = self.cloud_native_instances
if self.ip_follow_status is not None:
result['IpFollowStatus'] = self.ip_follow_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('SourceIps') is not None:
self.source_ips = m.get('SourceIps')
if m.get('IsAccessProduct') is not None:
self.is_access_product = m.get('IsAccessProduct')
if m.get('AccessHeaderMode') is not None:
self.access_header_mode = m.get('AccessHeaderMode')
if m.get('AccessHeaders') is not None:
self.access_headers = m.get('AccessHeaders')
if m.get('LoadBalancing') is not None:
self.load_balancing = m.get('LoadBalancing')
if m.get('LogHeaders') is not None:
self.log_headers = m.get('LogHeaders')
if m.get('HttpPort') is not None:
self.http_port = m.get('HttpPort')
if m.get('HttpsPort') is not None:
self.https_port = m.get('HttpsPort')
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
if m.get('HttpToUserIp') is not None:
self.http_to_user_ip = m.get('HttpToUserIp')
if m.get('HttpsRedirect') is not None:
self.https_redirect = m.get('HttpsRedirect')
if m.get('ClusterType') is not None:
self.cluster_type = m.get('ClusterType')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('ConnectionTime') is not None:
self.connection_time = m.get('ConnectionTime')
if m.get('ReadTime') is not None:
self.read_time = m.get('ReadTime')
if m.get('WriteTime') is not None:
self.write_time = m.get('WriteTime')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('CloudNativeInstances') is not None:
self.cloud_native_instances = m.get('CloudNativeInstances')
if m.get('IpFollowStatus') is not None:
self.ip_follow_status = m.get('IpFollowStatus')
return self
class CreateDomainResponseBody(TeaModel):
def __init__(self, request_id=None, cname=None):
self.request_id = request_id # type: str
self.cname = cname # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.cname is not None:
result['Cname'] = self.cname
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('Cname') is not None:
self.cname = m.get('Cname')
return self
class CreateDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateProtectionModuleRuleRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule = rule # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateProtectionModuleRuleRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule is not None:
result['Rule'] = self.rule
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('Rule') is not None:
self.rule = m.get('Rule')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class CreateProtectionModuleRuleResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateProtectionModuleRuleResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class CreateProtectionModuleRuleResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateProtectionModuleRuleResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateProtectionModuleRuleResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateProtectionModuleRuleResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DeleteDomainResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteInstanceRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteInstanceRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DeleteInstanceResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteInstanceResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteInstanceResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteInstanceResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteInstanceResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteInstanceResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteProtectionModuleRuleRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule_id=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule_id = rule_id # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteProtectionModuleRuleRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule_id is not None:
result['RuleId'] = self.rule_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DeleteProtectionModuleRuleResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteProtectionModuleRuleResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteProtectionModuleRuleResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteProtectionModuleRuleResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteProtectionModuleRuleResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteProtectionModuleRuleResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeCertificatesRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertificatesRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DescribeCertificatesResponseBodyCertificates(TeaModel):
def __init__(self, certificate_name=None, common_name=None, sans=None, is_using=None, certificate_id=None):
self.certificate_name = certificate_name # type: str
self.common_name = common_name # type: str
self.sans = sans # type: list[str]
self.is_using = is_using # type: bool
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertificatesResponseBodyCertificates, self).to_map()
if _map is not None:
return _map
result = dict()
if self.certificate_name is not None:
result['CertificateName'] = self.certificate_name
if self.common_name is not None:
result['CommonName'] = self.common_name
if self.sans is not None:
result['Sans'] = self.sans
if self.is_using is not None:
result['IsUsing'] = self.is_using
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('CertificateName') is not None:
self.certificate_name = m.get('CertificateName')
if m.get('CommonName') is not None:
self.common_name = m.get('CommonName')
if m.get('Sans') is not None:
self.sans = m.get('Sans')
if m.get('IsUsing') is not None:
self.is_using = m.get('IsUsing')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class DescribeCertificatesResponseBody(TeaModel):
def __init__(self, request_id=None, certificates=None):
self.request_id = request_id # type: str
self.certificates = certificates # type: list[DescribeCertificatesResponseBodyCertificates]
def validate(self):
if self.certificates:
for k in self.certificates:
if k:
k.validate()
def to_map(self):
_map = super(DescribeCertificatesResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
result['Certificates'] = []
if self.certificates is not None:
for k in self.certificates:
result['Certificates'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.certificates = []
if m.get('Certificates') is not None:
for k in m.get('Certificates'):
temp_model = DescribeCertificatesResponseBodyCertificates()
self.certificates.append(temp_model.from_map(k))
return self
class DescribeCertificatesResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeCertificatesResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeCertificatesResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeCertificatesResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeCertMatchStatusRequest(TeaModel):
def __init__(self, domain=None, certificate=None, private_key=None, instance_id=None):
self.domain = domain # type: str
self.certificate = certificate # type: str
self.private_key = private_key # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertMatchStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate is not None:
result['Certificate'] = self.certificate
if self.private_key is not None:
result['PrivateKey'] = self.private_key
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Certificate') is not None:
self.certificate = m.get('Certificate')
if m.get('PrivateKey') is not None:
self.private_key = m.get('PrivateKey')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DescribeCertMatchStatusResponseBody(TeaModel):
def __init__(self, request_id=None, match_status=None):
self.request_id = request_id # type: str
self.match_status = match_status # type: bool
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertMatchStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.match_status is not None:
result['MatchStatus'] = self.match_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('MatchStatus') is not None:
self.match_status = m.get('MatchStatus')
return self
class DescribeCertMatchStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeCertMatchStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeCertMatchStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeCertMatchStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs(TeaModel):
def __init__(self, protocol=None, ports=None):
self.protocol = protocol # type: str
self.ports = ports # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs, self).to_map()
if _map is not None:
return _map
result = dict()
if self.protocol is not None:
result['Protocol'] = self.protocol
if self.ports is not None:
result['Ports'] = self.ports
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Protocol') is not None:
self.protocol = m.get('Protocol')
if m.get('Ports') is not None:
self.ports = m.get('Ports')
return self
class DescribeDomainResponseBodyDomainCloudNativeInstances(TeaModel):
def __init__(self, protocol_port_configs=None, redirection_type_name=None, cloud_native_product_name=None,
instance_id=None, ipaddress_list=None):
self.protocol_port_configs = protocol_port_configs # type: list[DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs]
self.redirection_type_name = redirection_type_name # type: str
self.cloud_native_product_name = cloud_native_product_name # type: str
self.instance_id = instance_id # type: str
self.ipaddress_list = ipaddress_list # type: str
def validate(self):
if self.protocol_port_configs:
for k in self.protocol_port_configs:
if k:
k.validate()
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainCloudNativeInstances, self).to_map()
if _map is not None:
return _map
result = dict()
result['ProtocolPortConfigs'] = []
if self.protocol_port_configs is not None:
for k in self.protocol_port_configs:
result['ProtocolPortConfigs'].append(k.to_map() if k else None)
if self.redirection_type_name is not None:
result['RedirectionTypeName'] = self.redirection_type_name
if self.cloud_native_product_name is not None:
result['CloudNativeProductName'] = self.cloud_native_product_name
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.ipaddress_list is not None:
result['IPAddressList'] = self.ipaddress_list
return result
def from_map(self, m=None):
m = m or dict()
self.protocol_port_configs = []
if m.get('ProtocolPortConfigs') is not None:
for k in m.get('ProtocolPortConfigs'):
temp_model = DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs()
self.protocol_port_configs.append(temp_model.from_map(k))
if m.get('RedirectionTypeName') is not None:
self.redirection_type_name = m.get('RedirectionTypeName')
if m.get('CloudNativeProductName') is not None:
self.cloud_native_product_name = m.get('CloudNativeProductName')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('IPAddressList') is not None:
self.ipaddress_list = m.get('IPAddressList')
return self
class DescribeDomainResponseBodyDomainLogHeaders(TeaModel):
def __init__(self, k=None, v=None):
self.k = k # type: str
self.v = v # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainLogHeaders, self).to_map()
if _map is not None:
return _map
result = dict()
if self.k is not None:
result['k'] = self.k
if self.v is not None:
result['v'] = self.v
return result
def from_map(self, m=None):
m = m or dict()
if m.get('k') is not None:
self.k = m.get('k')
if m.get('v') is not None:
self.v = m.get('v')
return self
class DescribeDomainResponseBodyDomain(TeaModel):
def __init__(self, http_2port=None, cloud_native_instances=None, http_to_user_ip=None, http_port=None,
log_headers=None, is_access_product=None, access_headers=None, access_header_mode=None, https_redirect=None,
load_balancing=None, ip_follow_status=None, access_type=None, version=None, cluster_type=None, read_time=None,
write_time=None, resource_group_id=None, cname=None, source_ips=None, connection_time=None, https_port=None):
self.http_2port = http_2port # type: list[str]
self.cloud_native_instances = cloud_native_instances # type: list[DescribeDomainResponseBodyDomainCloudNativeInstances]
self.http_to_user_ip = http_to_user_ip # type: int
self.http_port = http_port # type: list[str]
self.log_headers = log_headers # type: list[DescribeDomainResponseBodyDomainLogHeaders]
self.is_access_product = is_access_product # type: int
self.access_headers = access_headers # type: list[str]
self.access_header_mode = access_header_mode # type: int
self.https_redirect = https_redirect # type: int
self.load_balancing = load_balancing # type: int
self.ip_follow_status = ip_follow_status # type: int
self.access_type = access_type # type: str
self.version = version # type: long
self.cluster_type = cluster_type # type: int
self.read_time = read_time # type: int
self.write_time = write_time # type: int
self.resource_group_id = resource_group_id # type: str
self.cname = cname # type: str
self.source_ips = source_ips # type: list[str]
self.connection_time = connection_time # type: int
self.https_port = https_port # type: list[str]
def validate(self):
if self.cloud_native_instances:
for k in self.cloud_native_instances:
if k:
k.validate()
if self.log_headers:
for k in self.log_headers:
if k:
k.validate()
def to_map(self):
_map = super(DescribeDomainResponseBodyDomain, self).to_map()
if _map is not None:
return _map
result = dict()
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
result['CloudNativeInstances'] = []
if self.cloud_native_instances is not None:
for k in self.cloud_native_instances:
result['CloudNativeInstances'].append(k.to_map() if k else None)
if self.http_to_user_ip is not None:
result['HttpToUserIp'] = self.http_to_user_ip
if self.http_port is not None:
result['HttpPort'] = self.http_port
result['LogHeaders'] = []
if self.log_headers is not None:
for k in self.log_headers:
result['LogHeaders'].append(k.to_map() if k else None)
if self.is_access_product is not None:
result['IsAccessProduct'] = self.is_access_product
if self.access_headers is not None:
result['AccessHeaders'] = self.access_headers
if self.access_header_mode is not None:
result['AccessHeaderMode'] = self.access_header_mode
if self.https_redirect is not None:
result['HttpsRedirect'] = self.https_redirect
if self.load_balancing is not None:
result['LoadBalancing'] = self.load_balancing
if self.ip_follow_status is not None:
result['IpFollowStatus'] = self.ip_follow_status
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.version is not None:
result['Version'] = self.version
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.read_time is not None:
result['ReadTime'] = self.read_time
if self.write_time is not None:
result['WriteTime'] = self.write_time
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.cname is not None:
result['Cname'] = self.cname
if self.source_ips is not None:
result['SourceIps'] = self.source_ips
if self.connection_time is not None:
result['ConnectionTime'] = self.connection_time
if self.https_port is not None:
result['HttpsPort'] = self.https_port
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
        self.cloud_native_instances = []
        if m.get('CloudNativeInstances') is not None:
            for k in m.get('CloudNativeInstances'):
                temp_model = DescribeDomainResponseBodyDomainCloudNativeInstances()
                self.cloud_native_instances.append(temp_model.from_map(k))
        if m.get('HttpToUserIp') is not None:
            self.http_to_user_ip = m.get('HttpToUserIp')
        if m.get('HttpPort') is not None:
            self.http_port = m.get('HttpPort')
        self.log_headers = []
        if m.get('LogHeaders') is not None:
            for k in m.get('LogHeaders'):
                temp_model = DescribeDomainResponseBodyDomainLogHeaders()
                self.log_headers.append(temp_model.from_map(k))
        if m.get('IsAccessProduct') is not None:
            self.is_access_product = m.get('IsAccessProduct')
        if m.get('AccessHeaders') is not None:
            self.access_headers = m.get('AccessHeaders')
        if m.get('AccessHeaderMode') is not None:
            self.access_header_mode = m.get('AccessHeaderMode')
        if m.get('HttpsRedirect') is not None:
            self.https_redirect = m.get('HttpsRedirect')
        if m.get('LoadBalancing') is not None:
            self.load_balancing = m.get('LoadBalancing')
        if m.get('IpFollowStatus') is not None:
            self.ip_follow_status = m.get('IpFollowStatus')
        if m.get('AccessType') is not None:
            self.access_type = m.get('AccessType')
        if m.get('Version') is not None:
            self.version = m.get('Version')
        if m.get('ClusterType') is not None:
            self.cluster_type = m.get('ClusterType')
        if m.get('ReadTime') is not None:
            self.read_time = m.get('ReadTime')
        if m.get('WriteTime') is not None:
            self.write_time = m.get('WriteTime')
        if m.get('ResourceGroupId') is not None:
            self.resource_group_id = m.get('ResourceGroupId')
        if m.get('Cname') is not None:
            self.cname = m.get('Cname')
        if m.get('SourceIps') is not None:
            self.source_ips = m.get('SourceIps')
        if m.get('ConnectionTime') is not None:
            self.connection_time = m.get('ConnectionTime')
        if m.get('HttpsPort') is not None:
            self.https_port = m.get('HttpsPort')
        return self
# Python library import
import asyncio, asyncssh, logging
# Module logging logger
log = logging.getLogger(__package__)
# Debug level
# logging.basicConfig(level=logging.WARNING)
# logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG)
asyncssh.set_debug_level(2)
# Declaration of constant values
# Max data to read in read function
MAX_BUFFER_DATA = 65535
# Dictionary with all IPv4 netmasks
ipv4_netmask_list = {
"0.0.0.0": "0",
"128.0.0.0": "1",
"192.0.0.0": "2",
"224.0.0.0": "3",
"240.0.0.0": "4",
"248.0.0.0": "5",
"252.0.0.0": "6",
"254.0.0.0": "7",
"255.0.0.0": "8",
"255.128.0.0": "9",
"255.192.0.0": "10",
"255.224.0.0": "11",
"255.240.0.0": "12",
"255.248.0.0": "13",
"255.252.0.0": "14",
"255.254.0.0": "15",
"255.255.0.0": "16",
"255.255.128.0": "17",
"255.255.192.0": "18",
"255.255.224.0": "19",
"255.255.240.0": "20",
"255.255.248.0": "21",
"255.255.252.0": "22",
"255.255.254.0": "23",
"255.255.255.0": "24",
"255.255.255.128": "25",
"255.255.255.192": "26",
"255.255.255.224": "27",
"255.255.255.240": "28",
"255.255.255.248": "29",
"255.255.255.252": "30",
"255.255.255.254": "31",
"255.255.255.255": "32",
}
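# Illustrative examples (commented out): the table maps a dotted-decimal netmask
# to its CIDR prefix length as a string.
#
# assert ipv4_netmask_list["255.255.255.0"] == "24"
# assert ipv4_netmask_list["255.255.128.0"] == "17"
# prefix_length = ipv4_netmask_list.get("255.255.252.0")  # -> "22"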
class NetworkDevice:
"""
Base class for network object
:param ip: IP address of a device
:type ip: str
:param username: Username used to connect to a device
:type username: str
:param password: Password used to connect to a device
:type password: str
:param device_type: Type of device used
:type device_type: str
:param port: TCP port used to connect a device. Default value is "22" for SSH
:type port: int, optional
    :param timeout: Timeout in seconds used to connect to a device. Default value is 10 seconds
:type timeout: int, optional
:param _protocol: Protocol used to connect a device. "ssh" or "telnet" are possible options. Default value is "ssh"
:type _protocol: str, optional
:param enable_mode: Enable mode for devices requiring it. Default value is "False"
:type enable_mode: bool, optional
:param enable_password: Enable password used for enable mode.
:type enable_password: str, optional
:param conn: Variable used for the management of the SSH connection
:type conn: SSHClientConnection object
:param _writer: Variable used for the management of the Telnet connection and writing channel
:type _writer: StreamWriter object
:param _reader: Variable used for the management of the Telnet reading channel
:type _reader: StreamReader object
:param possible_prompts: Used by the connect method to list all possible prompts of the device
:type possible_prompts: list
    :param _connect_first_ending_prompt: Default possible ending prompts. Used only once, right after login and password, to discover the prompt
:type _connect_first_ending_prompt: list
:param list_of_possible_ending_prompts: Different strings at the end of a prompt the device can get. Used for detecting the prompt returned in sent commands
:type list_of_possible_ending_prompts: list
    :param _telnet_connect_login: Login prompt for Telnet. Used to detect when a login is expected or when login and password access has failed
:type _telnet_connect_login: str
    :param _telnet_connect_password: Password prompt for Telnet. Used to detect when a login is expected or when login and password access has failed
    :type _telnet_connect_password: str
:param _telnet_connect_authentication_fail_prompt: Known failing messages or prompts when an authentication has failed. Used to get an answer faster than timeout events
:type _telnet_connect_authentication_fail_prompt: list
:param cmd_enable: Enable command for entering into enable mode
:type cmd_enable: str
:param cmd_disable_paging: Command used to disable paging on a device. That command is run at connection time
:type cmd_disable_paging: str
    :param cmd_enter_config_mode: Command used to enter into a configuration mode on a device when this device supports that feature.
:type cmd_enter_config_mode: str
    :param cmd_exit_config_mode: Command used to leave a configuration mode on a device when this device supports that feature.
:type cmd_exit_config_mode: str
:param cmd_get_version: API command used to get the software version of a device
:type cmd_get_version: str
:param cmd_get_hostname: API command used to get the hostname of a device
:type cmd_get_hostname: str
:param cmd_get_model: API command used to get the model of a device
:type cmd_get_model: str
:param cmd_get_serial_number: API command used to get the serial number of a device
:type cmd_get_serial_number: str
:param cmd_get_config: API command used to get the running configuration of a device
:type cmd_get_config: str
:param cmd_save_config: API command used to save the running configuration on the device
:type cmd_save_config: str
"""
def __init__(self, **kwargs):
# Display info message
log.info("__init__")
self.ip = ""
self.username = ""
self.password = ""
self.device_type = ""
self.port = 22
self.timeout = 10
self._protocol = "ssh"
self.enable_mode = False
self.enable_password = ""
self.conn = None
self._writer = None
self._reader = None
self.possible_prompts = []
self._connect_first_ending_prompt = ["#", ">"]
self.list_of_possible_ending_prompts = [
"(config-line)#",
"(config-if)#",
"(config)#",
">",
"#",
]
self._carriage_return_for_send_command = "\n"
self._send_command_error_in_returned_output = []
self._telnet_connect_login = "Username:"
self._telnet_connect_password = "Password:"
self._telnet_connect_authentication_fail_prompt = [":", "%"]
# General commands
self.cmd_enable = "enable"
self.cmd_disable_paging = "terminal length 0"
self.cmd_enter_config_mode = "configure terminal"
self.cmd_exit_config_mode = "exit"
self.cmd_get_version = "show version"
self.cmd_get_hostname = "show version | include uptime"
self.cmd_get_model = "show inventory"
self.cmd_get_serial_number = "show inventory | i SN"
self.cmd_get_config = "show running-config"
self.cmd_save_config = "write memory"
# Layer 1 commands
self.cmd_get_interfaces = [
"interface ethernet print terse without-paging",
"foreach i in=([/interface ethernet find]) do={/interface ethernet monitor $i once without-paging}",
"interface bridge port print terse without-paging",
]
self.cmd_set_interface = [
"interface ethernet enable <INTERFACE>",
"interface ethernet disable <INTERFACE>",
'interface ethernet comment <INTERFACE> "<COMMENT>"',
"interface ethernet set l2mtu=<MAXIMUMFRAMESIZE> <INTERFACE>",
"interface bridge port set frame-types=<MODE> ingress-filtering=<FILTERINGVLAN> [find interface=<INTERFACE>]",
]
# Layer 2 commands
self.cmd_get_mac_address_table = "interface bridge host print without-paging"
self.cmd_get_arp = "ip arp print terse without-paging"
self.cmd_get_lldp_neighbors = "ip neighbor print terse without-paging"
self.cmd_get_vlans = "interface bridge vlan print terse without-paging"
self.cmd_add_vlan = 'interface bridge vlan add vlan-ids=<VLAN> comment="<VLAN_NAME>" bridge=<BRIDGE>'
self.cmd_remove_vlan = "interface bridge vlan remove [find vlan-ids=<VLAN>]"
self.cmd_add_interface_to_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
self.cmd_remove_interface_from_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
# Layer 3 commands
self.cmd_get_routing_table = "ip route print without-paging terse"
self.cmd_get_interfaces_ip = "ip address print terse without-paging"
self.cmd_add_static_route = "ip route add dst-address=<NETWORK>/<PREFIXLENGTH> gateway=<DESTINATION> distance=<METRIC>"
self.cmd_remove_static_route = (
"ip route remove [find dst-address=<NETWORK>/<PREFIXLENGTH>]"
)
# Display info message
log.debug("__init__: kwargs: " + str(kwargs))
# Get information from dictionary
# "ip" found?
if "ip" in kwargs:
# Save "ip" parameter
self.ip = kwargs["ip"]
# Display info message
log.info("__init__: ip found: " + str(self.ip))
# "username" found?
if "username" in kwargs:
self.username = kwargs["username"]
# Display info message
log.info("__init__: username found: " + str(self.username))
# "password" found?
if "password" in kwargs:
self.password = kwargs["password"]
# Display info message
log.debug("__init__: password found: " + str(self.password))
# "device_type" found?
if "device_type" in kwargs:
self.device_type = kwargs["device_type"]
# Display info message
log.info("__init__: device_type found: " + str(self.device_type))
# "timeout" found?
if "timeout" in kwargs:
self.timeout = kwargs["timeout"]
# Display info message
log.info("__init__: timeout found: " + str(self.timeout))
# "protocol" found?
if "protocol" in kwargs:
self._protocol = kwargs["protocol"].lower()
# Display info message
log.info("__init__: protocol found: " + str(self._protocol))
# By default telnet port is 23
if self._protocol.lower() == "telnet":
self.port = 23
# "port" found?
if "port" in kwargs:
self.port = kwargs["port"]
# Display info message
log.info("__init__: port found: " + str(self.port))
# "enable_mode" found?
if "enable_mode" in kwargs:
self.enable_mode = kwargs["enable_mode"]
# Display info message
log.info("__init__: enable_mode found: " + str(self.enable_mode))
# "enable_password" found?
if "enable_password" in kwargs:
self.enable_password = kwargs["enable_password"]
# Display info message
log.info("__init__: enable_password found: " + str(self.enable_password))
async def __aenter__(self):
"""
Context manager opening connection
"""
try:
# Run an async method to connect a device
await self.connect()
except Exception:
# Disconnection (if needed) in case the connection is done but something failed
await self.disconnect()
# propagate exception if needed
raise
return self
# async def _aexit_(self, exc_type, exc_value, traceback):
async def __aexit__(self, exc_type, exc_value, traceback):
"""
Context manager closing connection
"""
# Close the connection
await self.disconnect()
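    # Usage sketch (hypothetical values; in practice a vendor-specific subclass of
    # NetworkDevice would be used):
    #
    # async def main():
    #     async with NetworkDevice(ip="192.0.2.1", username="admin",
    #                              password="secret", protocol="ssh") as device:
    #         version = await device.send_command(device.cmd_get_version)
    #         print(version)
    #
    # asyncio.run(main())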
def find_prompt(self, text):
"""
Method used to find a prompt inside an output string
This method is used during the first communication with the device.
        First it finds the prompt, then calculates the different forms the prompt
        can take. This will be useful later on when finding the prompt in other
        output streams (read).
:param text: data with a prompt
:type text: str
:return: the prompt found
:rtype: str
"""
# Get last line of the data
prompt = text.split("\n")[-1]
        # Remove possible \r in the data
        prompt = prompt.split("\r")[-1]
# Display info message
log.info(f"find_prompt: prompt: '{prompt}'")
# Get the possible prompts for future recognition
self.possible_prompts = self.get_possible_prompts(prompt)
# Return the prompt
return prompt
def get_possible_prompts(self, prompt):
"""
Method used to check if a prompt has one of the expected endings then
create a list with all possible prompts for the device
:param prompt: a prompt with a possible ending prompt (eg. "switch#")
:type prompt: str
:return: the list of prompts
:rtype: list
"""
# By default no prompts are returned
list_of_prompts = []
        # Get all the possible values of the endings of the prompt
list_of_possible_ending_prompts = self.list_of_possible_ending_prompts
# Temporary variable storing the prompt value
my_prompt = prompt
# Test each possible prompt ending (i.e '#', '>', "(config-if)#", "(config)#")
for ending in list_of_possible_ending_prompts:
# Is this current prompt ending at the end of the prompt?
if my_prompt.endswith(ending):
# Yes
# Then remove the ending
my_prompt = my_prompt[: -len(ending)]
# Break the loop
break
        # The prompt should now be reduced, e.g. from "switch#" to "switch"
# Display info message
log.info(f"get_possible_prompts: prompt found: '{my_prompt}'")
# Display info message
log.info(f"get_possible_prompts: prompt found size: '{len(my_prompt)}'")
# Now create all the possible prompts for that device
for ending in list_of_possible_ending_prompts:
# Save the prompt name with a possible ending in the list
list_of_prompts.append(my_prompt + ending)
# Display info message
log.info(f"get_possible_prompts: list of possible prompts: {list_of_prompts}")
# Return the list of prompts
return list_of_prompts
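    # Illustrative example: get_possible_prompts("switch(config)#") strips the
    # "(config)#" ending, keeps the base name "switch", and rebuilds every variant:
    # ["switch(config-line)#", "switch(config-if)#", "switch(config)#", "switch>", "switch#"]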
def check_if_prompt_is_found(self, text):
"""
Method used to check if a prompt is detected inside a string
:param text: a string with prompt
:type text: str
:return: the prompt found
:rtype: str
"""
# By default the prompt is not found
prompt_found = False
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"check_if_prompt_is_found: prompt: '{prompt}'")
# Is this prompt present in the text?
if prompt in text:
# Yes
prompt_found = True
# Display info message
log.info(f"check_if_prompt_is_found: prompt found: '{prompt}'")
# Leave the for loop
break
# Return the prompt found
return prompt_found
def remove_command_in_output(self, text, cmd):
"""
Method removing the command at the beginning of a string
After sending commands an "echo" of the command sent
is display in the output string. This method removes it.
:param text: the text with the command at the beginning
:type text: str
:param cmd: the command previously sent
:type cmd: str
:return: the output string without the command
:rtype: str
"""
# Display info message
log.info(f"remove_command_in_output: cmd = '{cmd}'")
# Display info message
log.info(f"remove_command_in_output: cmd (hex) = '{cmd.encode().hex()}'")
# Remove the command from the beginning of the output
# output = text.lstrip(cmd + "\n")
output = text.split(cmd + "\n")[-1]
# Display info message
log.info(f"remove_command_in_output: output = '{output}'")
# Return the string without the command
return output
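    # Illustrative example (hypothetical device output):
    # remove_command_in_output("show version\nCisco IOS ...\nswitch#", "show version")
    # -> "Cisco IOS ...\nswitch#"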
def remove_starting_carriage_return_in_output(self, text):
"""
Method removing the carriage return at the beginning of a string
:param text: the text with the command at the beginning
:type text: str
:return: the output string without the starting carriage return
:rtype: str
"""
# Display info message
log.info("remove_starting_carriage_return_in_output")
# Remove the carriage return at the beginning of the string
output = text.lstrip("\r\n\r")
# Display info message
log.info(f"remove_starting_carriage_return_in_output: output = '{output}'")
# Return the string without the starting carriage return
return output
def remove_ending_prompt_in_output(self, text):
"""
Method removing the prompt at the end of a string
:param text: the text with a prompt at the beginning
:type text: str
:return: the output string without the ending prompt
:rtype: str
"""
# Display info message
log.info("remove_ending_prompt_in_output")
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"remove_ending_prompt_in_output: prompt: '{prompt}'")
# Prompt found in the text?
if prompt in text:
# Yes
# Then it is removed from the text
# text = text.rstrip(prompt)
text = text[: -len(prompt)]
# Remove also carriage return
text = text.rstrip("\r\n")
# Leave the loop
break
# output = text.rstrip("\r\n" + self.prompt)
# Display info message
log.info(f"remove_ending_prompt_in_output: text without prompt:\n'{text}'")
# Return the text without prompt at the end
return text
def check_error_output(self, output):
"""
Check if an error is returned by the device ("% Unrecognized command", "% Ambiguous command", etc.)
If an error is found, then an exception is raised
"""
# Display info message
log.info("check_error_output")
# Check if output has some data
if output:
# Yes
# Display info message
log.info("check_error_output: output has some data")
# Check all elements in the list of output
for element in self._send_command_error_in_returned_output:
# Display info message
log.info(f"check_error_output: element: {element}")
# Display info message
log.info(f"check_error_output: output[0]: {output[0]}")
# Check if the output starts with a string with an error message (like "% Invalid input detected at '^' marker.")
# Error message?
if output.startswith(element):
# Yes
# Raise an exception
raise Exception(output)
def remove_ansi_escape_sequence(self, text):
"""
Method removing ANSI escape sequence from a string
Just CSI sequences are removed
:param text: the text with a prompt at the beginning
:type text: str
:return: the output string without the ending prompt
:rtype: str
"""
# By default no string returned
output = ""
# By default no escape sequence found
esc_found = 0
# Read char by char a string
for i in text:
# Display char
# log.info(f"{str(i).encode('ascii')}")
# No escape previously found?
if esc_found == 0:
# No escape sequence currently found
# Escape?
if i == "\x1b":
# Yes
log.info("Esc!")
# Escape found
esc_found = 1
else:
# No
# Then the current char can be saved
output += i
# Escape previously found?
elif esc_found == 1:
# Yes
# Then check if this is a CSI sequence
if i == "[":
# Beginning of CSI sequence
log.info("CSI sequence")
# CSI sequence
esc_found = 2
else:
# Another Escape sequence
# Keep the escape sequence in the string
output += "\x1b" + i
# No escape sequence next
esc_found = 0
else:
# Char between 'a' and 'z' or 'A' and 'Z'?
if (i >= "a" and i <= "z") or (i >= "A" and i <= "Z"):
# Yes
# Then it is the end of CSI escape sequence
log.info("End of escape sequence")
# No escape sequence next
esc_found = 0
# Return a string without ANSI escape sequence
return output
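    # Illustrative example: a CSI sequence such as "\x1b[2J" (clear screen) is
    # dropped while the surrounding text is kept:
    # remove_ansi_escape_sequence("\x1b[2Jswitch#") -> "switch#"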
async def disable_paging(self):
"""
Async method disabling paging on a device
Use the "cmd_disable_paging" attribute
"""
# Display info message
log.info("disable_paging")
# Send command to the device to disable paging
await self.send_command(self.cmd_disable_paging)
async def connect(self):
"""
Async method used for connecting a device
Currently supported: SSH and Telnet
"""
# Display info message
log.info("connect")
try:
# SSH?
if self._protocol == "ssh":
# Yes
# Then Connect using SSH
await self.connectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then Connect using Telnet
await self.connectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"connect: unsupported protocol: {self._protocol}")
except Exception:
# There was a problem with a connection method
# Display info message
log.info("connect: connection error")
raise
async def connectSSH(self):
"""
Async method used for connecting a device using SSH protocol
"""
# Display info message
log.info("connectSSH")
# Parameters of the connection
generator = asyncssh.connect(
self.ip,
username=self.username,
password=self.password,
known_hosts=None,
# encryption_algs="*", # Parameter that includes all encryption algorithms (even the old ones disabled by default)
encryption_algs=[
algs.decode("utf-8") for algs in asyncssh.encryption._enc_algs
], # Parameter that includes all encryption algorithms (even the old ones disabled by default)
)
# Trying to connect to the device
try:
self.conn = await asyncio.wait_for(generator, timeout=self.timeout)
except asyncio.exceptions.TimeoutError as error:
# Timeout
# Display error message
log.error(f"connectSSH: connection failed: {self.ip} timeout: '{error}'")
# Exception propagation
raise asyncio.exceptions.TimeoutError(
"Connection failed: connection timed out."
)
except Exception as error:
# Connection failed
# Display error message
log.error(f"connectSSH: connection failed: {self.ip} '{error}'")
# Exception propagation
raise
# Display info message
log.info("connectSSH: connection success")
# Create a session
self.stdinx, self.stdoutx, _ = await self.conn.open_session(term_type="netscud")
# Display info message
log.info("connectSSH: open_session success")
# By default no data has been read
data = ""
# By default no prompt found
prompt_not_found = True
try:
# Read data
while prompt_not_found:
# Display info message
log.info("connectSSH: beginning of the loop")
# Read the prompt
data += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=self.timeout
)
# Display info message
log.info(f"connectSSH: data: '{str(data)}'")
# Display info message
log.info(f"connectSSH: data: hex:'{data.encode('utf-8').hex()}'")
# Check if an initial prompt is found
for prompt in self._connect_first_ending_prompt:
# Ending prompt found?
if data.endswith(prompt):
# Yes
# Display info message
log.info(f"connectSSH: first ending prompt found: '{prompt}'")
# A ending prompt has been found
prompt_not_found = False
# Leave the loop
break
# Display info message
log.info("connectSSH: end of loop")
except Exception as error:
# Fail while reading the prompt
# Display error message
log.error(
f"connectSSH: timeout while reading the prompt: {self.ip} '{error}'"
)
# Exception propagation
raise
# Display info message
log.info(f"connectSSH: end of prompt loop")
# Remove possible escape sequence
data = self.remove_ansi_escape_sequence(data)
# Find prompt
self.prompt = self.find_prompt(str(data))
# Display info message
log.info(f"connectSSH: prompt found: '{self.prompt}'")
# Display info message
log.info(f"connectSSH: prompt found size: '{len(self.prompt)}'")
# Disable paging command available?
if self.cmd_disable_paging:
# Yes
# Disable paging
await self.disable_paging()
async def connectTelnet(self):
"""
Async method used for connecting a device using Telnet protocol
"""
# Display info message
log.info("connectTelnet")
try:
# Prepare connection with Telnet
conn = asyncio.open_connection(self.ip, self.port)
except Exception as error:
# Preparation to the connection failed
# Display error message
log.error(f"connectTelnet: preparation to the connection failed: '{error}'")
# Exception propagation
raise
# Display info message
log.info("connectTelnet: preparation to the connection success")
try:
# Connection with Telnet
self._reader, self._writer = await asyncio.wait_for(
conn, timeout=self.timeout
)
except asyncio.TimeoutError:
# Time out during connection
# Display error message
log.error("connectTelnet: connection: timeout")
# Exception propagation
raise
# Display info message
log.info("connectTelnet: connection success")
# Get prompt for the login
prompt = self._telnet_connect_login
# Get prompt for the password
prompt_password = self._telnet_connect_password
# By default a login is expected
use_login = True
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# Read the telnet information and first prompt (for login but a password prompt can be found for IOS for instance)
while True:
# Display info message
log.info(f"connectTelnet: read data for prompt")
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=self.timeout
)
# Display info message
log.info(f"connectTelnet: byte_data: {byte_data}")
            # Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"connectTelnet: output: {output}")
# Prompt for the username found?
if prompt in output:
# Yes
# Leave the loop
break
# Prompt for the password found?
elif prompt_password in output:
# Yes
# That means only password is required
use_login = False
# Leave the loop
break
# Display info message
log.info(f"connectTelnet: login prompt: '{output}'")
# Login to use?
if use_login:
# Yes
# Display info message
log.info("connectTelnet: sending login")
try:
# Send login
await self.send_command(self.username, prompt_password)
# Display info message
log.info("connectTelnet: login sent")
except Exception:
# Problem with the login
# Propagate the exception
raise
# Display info message
log.info("connectTelnet: sending password")
try:
# Send password
output = await self.telnet_send_command_with_unexpected_pattern(
self.password,
self._connect_first_ending_prompt,
self._telnet_connect_authentication_fail_prompt,
)
except Exception:
# Problem with the password
# Propagate the exception
raise
# Display info message
log.info("connectTelnet: password sent")
# Find prompt
self.prompt = self.find_prompt(str(output))
# Display info message
log.info(f"connectTelnet: prompt found: '{self.prompt}'")
# Password enable?
if self.enable_mode:
# Yes
# Display info message
log.info("connectTelnet: enable mode to be activated")
try:
# Send enable command
await self.send_command(self.cmd_enable, prompt_password)
# Display info message
log.info("connectTelnet: enable command sent")
# Display info message
log.info("connectTelnet: sending enable password")
# Send enable password
await self.telnet_send_command_with_unexpected_pattern(
self.enable_password,
self._connect_first_ending_prompt,
self._telnet_connect_authentication_fail_prompt,
)
# Display info message
log.info("connectTelnet: enable password sent")
except Exception:
# Problem with the enable password
# Display info message
log.info("connectTelnet: enable password failure")
# Propagate the exception
raise
# Disable paging command available?
if self.cmd_disable_paging:
# Yes
# Disable paging
await self.disable_paging()
async def disconnect(self):
"""
Async method used to disconnect a device
If this method is not used then exceptions will happen
        when the program ends
"""
# Debug info message
log.info("disconnect")
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
await self.disconnectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
await self.disconnectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"Unsupported protocol: {self._protocol}")
async def disconnectSSH(self):
"""
Async method used to disconnect a device in SSH
If this method is not used then exceptions will happen
        when the program ends
"""
# Debug info message
log.info("disconnectSSH")
# Connection previously open in SSH?
if self.conn:
# Yes
# Then close the SSH connection
self.conn.close()
# No more connection to disconnect
self.conn = None
async def disconnectTelnet(self):
"""
Async method used to disconnect a device in Telnet
If this method is not used then exceptions will happen
        when the program ends
"""
# Debug info message
log.info("disconnectTelnet")
# Connection previously open in Telnet?
if self._writer:
# Yes
            # Then close the Telnet connection
self._writer.close()
# No more connection to disconnect
self._writer = None
async def send_command(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_command")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
output = await self.send_commandSSH(cmd, pattern=pattern, timeout=timeout)
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
output = await self.send_commandTelnet(
cmd, pattern=pattern, timeout=timeout
)
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"send_command: unsupported protocol: {self._protocol}")
# Return the result of the command
return output
async def send_commandSSH(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_commandSSH")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
# cmd = cmd + "\n"
# cmd = cmd + "\r\n"
# Debug info message
log.info(f"send_commandSSH: cmd = '{cmd}'")
# Sending command
self.stdinx.write(cmd + self._carriage_return_for_send_command)
# Display message
log.info("send_commandSSH: command sent")
# Variable used to gather data
output = ""
# Reading data
while True:
# await asyncio.sleep(1)
# Read the data received
output += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
)
# Debug info message
# log.info(f"send_commandSSH: output hex: '{str(output).encode("utf-8").hex()}'")
# Remove ANSI escape sequence
output = self.remove_ansi_escape_sequence(output)
# Remove possible "\r"
output = output.replace("\r", "")
# data = ""
# for i in output:
# data += i.encode("utf-8").hex()
# print(data)
# Debug info message
log.info(f"send_commandSSH: output: '{output}'")
            # Is a pattern used?
if pattern:
# Use pattern instead of prompt
if pattern in output:
# Yes
# Leave the loop
break
else:
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Debug info message
log.debug(
f"send_commandSSH: raw output: '{output}'\nsend_commandSSH: raw output (hex): '{output.encode().hex()}'"
)
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Debug info message
log.debug(
f"send_commandSSH: cleaned output: '{output}'\nsend_commandSSH: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the command
return output
async def send_commandTelnet(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_commandTelnet")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + "\n"
# Sending command
self._writer.write(cmd.encode())
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(f"send_commandTelnet: byte_data: '{byte_data}'")
                # Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_commandTelnet: output: '{output}'")
                # Is a pattern used?
if pattern:
# Use pattern instead of prompt
if pattern in output:
# Yes
# Leave the loop
break
else:
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
            # Timeout while reading the prompt
# Display error message
log.error("send_commandTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
            # Error while reading the prompt
# Display error message
log.error(f"send_commandTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_commandTelnet: raw output: '{output}'\nsend_commandTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Debug info message
log.debug(
f"send_commandTelnet: cleaned output: '{output}'\nsend_commandTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the command
return output
async def telnet_send_command_with_unexpected_pattern(
self, cmd, pattern, error_pattern=None, timeout=None
):
"""
Async method used to send command for Telnet connection to a device with possible unexpected patterns
        send_command can wait until timeout if login and password are wrong. This method
        speeds up the returned error message when an authentication failure is identified.
        This method is limited to authentication when a password is required
:param cmd: command to send
:type cmd: str
        :param pattern: optional, a list of patterns located at the very end of a returned string. Can be used
            to define a custom or unexpected prompt at the end of a string
        :type pattern: list
        :param timeout: optional, a timeout for the command sent. Default value is self.timeout
        :type timeout: float
        :param error_pattern: optional, a list of failed prompts found when the login and password are not correct
        :type error_pattern: list
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("telnet_send_command_with_unexpected_pattern")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Sending command
self._writer.write(cmd.encode())
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# By default pattern is not found
pattern_not_found = True
try:
# Read data
while pattern_not_found:
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: byte_data: '{byte_data}'"
)
# Display debug message
log.debug(
f"telnet_send_command_with_unexpected_pattern: byte_data: hex: '{byte_data.hex()}'"
)
                # Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: output: '{output}'"
)
# Is a pattern used?
if pattern:
# Check all pattern of prompt in the output
for prompt in pattern:
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: checking prompt: '{prompt}'"
)
# A pattern found?
if prompt in output:
# Yes
# A pattern is found. The main loop can be stopped
pattern_not_found = False
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: prompt found: '{prompt}'"
)
# Leave the loop
break
# Is an unexpected pattern used?
if error_pattern and pattern_not_found:
# Check all unexpected pattern of prompt in the output
for bad_prompt in error_pattern:
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: checking unexpected prompt: '{bad_prompt}'"
)
# An error_pattern pattern found?
if bad_prompt in output:
# Yes
# Display error message
log.error(
"telnet_send_command_with_unexpected_pattern: authentication failed"
)
# Raise exception
raise Exception(
"telnet_send_command_with_unexpected_pattern: authentication failed"
)
# Leave the loop
# break
except asyncio.TimeoutError:
            # Timeout while reading the prompt
# Close the connection in order to not display RuntimeError
await self.disconnect()
# Display error message
log.error(
"telnet_send_command_with_unexpected_pattern: reading prompt: timeout"
)
# Exception propagation
raise
except Exception as error:
            # Error while reading the prompt
# Close the connection in order to not display RuntimeError
await self.disconnect()
# Display error message
log.error(
f"telnet_send_command_with_unexpected_pattern: reading prompt: error: {error}"
)
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"telnet_send_command_with_unexpected_pattern: raw output: '{output}'\ntelnet_send_command_with_unexpected_pattern: raw output (hex): '{output.encode().
|
32e904a39d03d3166369420b49db0b9b118110a3
|
Python
|
import hashlib
import json
import logging
import os
import urllib.parse
import uuid
from datetime import datetime
import pytz
from celery import states as celery_states
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.base_user import BaseUserManager
from django.contrib.auth.models import PermissionsMixin
from django.contrib.sessions.models import Session
from django.core.cache import cache
from django.core.exceptions import MultipleObjectsReturned
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import PermissionDenied
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from django.core.files.storage import FileSystemStorage
from django.core.mail import send_mail
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
from django.db import IntegrityError
from django.db import models
from django.db.models import Count
from django.db.models import Exists
from django.db.models import F
from django.db.models import Index
from django.db.models import JSONField
from django.db.models import Max
from django.db.models import OuterRef
from django.db.models import Q
from django.db.models import Subquery
from django.db.models import Sum
from django.db.models import UUIDField as DjangoUUIDField
from django.db.models import Value
from django.db.models.expressions import ExpressionList
from django.db.models.expressions import RawSQL
from django.db.models.functions import Lower
from django.db.models.indexes import IndexExpression
from django.db.models.query_utils import DeferredAttribute
from django.db.models.sql import Query
from django.dispatch import receiver
from django.utils import timezone
from django.utils.translation import gettext as _
from django_celery_results.models import TaskResult
from django_cte import With
from le_utils import proquint
from le_utils.constants import content_kinds
from le_utils.constants import exercises
from le_utils.constants import file_formats
from le_utils.constants import format_presets
from le_utils.constants import languages
from le_utils.constants import roles
from model_utils import FieldTracker
from mptt.models import MPTTModel
from mptt.models import raise_if_unsaved
from mptt.models import TreeForeignKey
from postmark.core import PMMailInactiveRecipientException
from postmark.core import PMMailUnauthorizedException
from rest_framework.authtoken.models import Token
from rest_framework.fields import get_attribute
from rest_framework.utils.encoders import JSONEncoder
from contentcuration.constants import channel_history
from contentcuration.constants import completion_criteria
from contentcuration.constants import user_history
from contentcuration.constants.contentnode import kind_activity_map
from contentcuration.db.models.expressions import Array
from contentcuration.db.models.functions import ArrayRemove
from contentcuration.db.models.functions import Unnest
from contentcuration.db.models.manager import CustomContentNodeTreeManager
from contentcuration.db.models.manager import CustomManager
from contentcuration.statistics import record_channel_stats
from contentcuration.utils.cache import delete_public_channel_cache_keys
from contentcuration.utils.parser import load_json_string
from contentcuration.viewsets.sync.constants import ALL_CHANGES
from contentcuration.viewsets.sync.constants import ALL_TABLES
EDIT_ACCESS = "edit"
VIEW_ACCESS = "view"
DEFAULT_CONTENT_DEFAULTS = {
'license': None,
'language': None,
'author': None,
'aggregator': None,
'provider': None,
'copyright_holder': None,
'license_description': None,
'mastery_model': exercises.NUM_CORRECT_IN_A_ROW_5,
'm_value': 5,
'n_value': 5,
'auto_derive_video_thumbnail': True,
'auto_derive_audio_thumbnail': True,
'auto_derive_document_thumbnail': True,
'auto_derive_html5_thumbnail': True,
'auto_derive_exercise_thumbnail': True,
'auto_randomize_questions': True,
}
DEFAULT_USER_PREFERENCES = json.dumps(DEFAULT_CONTENT_DEFAULTS, ensure_ascii=False)
def to_pk(model_or_pk):
if isinstance(model_or_pk, models.Model):
return model_or_pk.pk
return model_or_pk
class UserManager(BaseUserManager):
def create_user(self, email, first_name, last_name, password=None):
if not email:
raise ValueError('Email address not specified')
new_user = self.model(
email=self.normalize_email(email),
)
new_user.set_password(password)
new_user.first_name = first_name
new_user.last_name = last_name
new_user.save(using=self._db)
return new_user
def create_superuser(self, email, first_name, last_name, password=None):
new_user = self.create_user(email, first_name, last_name, password=password)
new_user.is_admin = True
new_user.save(using=self._db)
return new_user
class UniqueActiveUserIndex(Index):
def create_sql(self, model, schema_editor, using='', **kwargs):
"""
This is a vendored and modified version of the Django create_sql method
We do this so that we can monkey patch in the unique index statement onto the schema_editor
while we create the statement for this index, and then revert it to normal.
We should remove this as soon as Django natively supports UniqueConstraints with Expressions.
This should hopefully be the case in Django 3.3.
"""
include = [model._meta.get_field(field_name).column for field_name in self.include]
condition = self._get_condition_sql(model, schema_editor)
if self.expressions:
index_expressions = []
for expression in self.expressions:
index_expression = IndexExpression(expression)
index_expression.set_wrapper_classes(schema_editor.connection)
index_expressions.append(index_expression)
expressions = ExpressionList(*index_expressions).resolve_expression(
Query(model, alias_cols=False),
)
fields = None
col_suffixes = None
else:
fields = [
model._meta.get_field(field_name)
for field_name, _ in self.fields_orders
]
col_suffixes = [order[1] for order in self.fields_orders]
expressions = None
sql = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(condition)s"
# Store the normal SQL statement for indexes
old_create_index_sql = schema_editor.sql_create_index
# Replace it with our own unique index so that this index actually adds a constraint
schema_editor.sql_create_index = sql
        # Generate the SQL statement that we want to return
return_statement = schema_editor._create_index_sql(
model, fields=fields, name=self.name, using=using,
db_tablespace=self.db_tablespace, col_suffixes=col_suffixes,
opclasses=self.opclasses, condition=condition, include=include,
expressions=expressions, **kwargs,
)
# Reinstate the previous index SQL statement so that we have done no harm
schema_editor.sql_create_index = old_create_index_sql
# Return our SQL statement
return return_statement
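# Usage sketch: see User.Meta below, where
#   UniqueActiveUserIndex(Lower('email'), condition=Q(is_active=True), name="contentcura_email_d4d492_idx")
# emits a CREATE UNIQUE INDEX statement, enforcing case-insensitive email
# uniqueness only for active users.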
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(max_length=100, unique=True)
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
is_admin = models.BooleanField(default=False)
is_active = models.BooleanField('active', default=False,
help_text='Designates whether this user should be treated as active.')
is_staff = models.BooleanField('staff status', default=False,
help_text='Designates whether the user can log into this admin site.')
date_joined = models.DateTimeField('date joined', default=timezone.now)
clipboard_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='user_clipboard', on_delete=models.SET_NULL)
preferences = models.TextField(default=DEFAULT_USER_PREFERENCES)
disk_space = models.FloatField(default=524288000, help_text='How many bytes a user can upload')
disk_space_used = models.FloatField(default=0, help_text='How many bytes a user has uploaded')
information = JSONField(null=True)
content_defaults = JSONField(default=dict)
policies = JSONField(default=dict, null=True)
feature_flags = JSONField(default=dict, null=True)
deleted = models.BooleanField(default=False, db_index=True)
_field_updates = FieldTracker(fields=[
# Field to watch for changes
"disk_space",
])
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['first_name', 'last_name']
def __unicode__(self):
return self.email
def delete(self):
"""
Soft deletes the user account.
"""
self.deleted = True
# Deactivate the user to disallow authentication and also
# to let the user verify the email again after recovery.
self.is_active = False
self.save()
self.history.create(user_id=self.pk, action=user_history.DELETION)
def recover(self):
"""
Use this method when we want to recover a user.
"""
self.deleted = False
self.save()
self.history.create(user_id=self.pk, action=user_history.RECOVERY)
def hard_delete_user_related_data(self):
"""
Hard delete all user related data. But keeps the user record itself intact.
User related data that gets hard deleted are:
- sole editor non-public channels.
- sole editor non-public channelsets.
- sole editor non-public channels' content nodes and its underlying files that are not
used by any other channel.
- all user invitations.
"""
from contentcuration.viewsets.common import SQCount
# Hard delete invitations associated to this account.
self.sent_to.all().delete()
self.sent_by.all().delete()
editable_channels_user_query = (
User.objects.filter(editable_channels__id=OuterRef('id'))
.values_list('id', flat=True)
.distinct()
)
non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(
editable_channels_user_query, field="id")).filter(num_editors=1, public=False)
# Point sole editor non-public channels' contentnodes to orphan tree to let
# our garbage collection delete the nodes and underlying files.
ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(
non_public_channels_sole_editor.values_list("id", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)
# Hard delete non-public channels associated with this user (if user is the only editor).
non_public_channels_sole_editor.delete()
# Hard delete non-public channel collections associated with this user (if user is the only editor).
user_query = (
User.objects.filter(channel_sets__id=OuterRef('id'))
.values_list('id', flat=True)
.distinct()
)
self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1, public=False).delete()
# Create history!
self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)
def can_edit(self, channel_id):
return Channel.filter_edit_queryset(Channel.objects.all(), self).filter(pk=channel_id).exists()
def check_space(self, size, checksum):
if self.is_admin:
return True
active_files = self.get_user_active_files()
if active_files.filter(checksum=checksum).exists():
return True
space = self.get_available_space(active_files=active_files)
if space < size:
raise PermissionDenied(_("Not enough space. Check your storage under Settings page."))
def check_channel_space(self, channel):
active_files = self.get_user_active_files()
staging_tree_id = channel.staging_tree.tree_id
channel_files = self.files\
.filter(contentnode__tree_id=staging_tree_id)\
.values('checksum')\
.distinct()\
.exclude(checksum__in=active_files.values_list('checksum', flat=True))
staged_size = float(channel_files.aggregate(used=Sum('file_size'))['used'] or 0)
if self.get_available_space(active_files=active_files) < (staged_size):
raise PermissionDenied(_('Out of storage! Request more space under Settings > Storage.'))
def check_staged_space(self, size, checksum):
if self.staged_files.filter(checksum=checksum).exists():
return True
space = self.get_available_staged_space()
if space < size:
raise PermissionDenied(_('Out of storage! Request more space under Settings > Storage.'))
def get_available_staged_space(self):
space_used = self.staged_files.values('checksum').distinct().aggregate(size=Sum("file_size"))['size'] or 0
return float(max(self.disk_space - space_used, 0))
def get_available_space(self, active_files=None):
return float(max(self.disk_space - self.get_space_used(active_files=active_files), 0))
def get_user_active_trees(self):
return self.editable_channels.exclude(deleted=True)\
.values(tree_id=F("main_tree__tree_id"))
def get_user_active_files(self):
cte = With(self.get_user_active_trees().distinct())
return cte.join(self.files.get_queryset(), contentnode__tree_id=cte.col.tree_id)\
.with_cte(cte)\
.values('checksum')\
.distinct()
def get_space_used(self, active_files=None):
active_files = active_files or self.get_user_active_files()
files = active_files.aggregate(total_used=Sum('file_size'))
return float(files['total_used'] or 0)
def set_space_used(self):
self.disk_space_used = self.get_space_used()
self.save()
return self.disk_space_used
def get_space_used_by_kind(self):
active_files = self.get_user_active_files()
files = active_files.values('preset__kind_id')\
.annotate(space=Sum('file_size'))\
.order_by()
kind_dict = {}
for item in files:
kind_dict[item['preset__kind_id']] = item['space']
return kind_dict
def email_user(self, subject, message, from_email=None, **kwargs):
try:
# msg = EmailMultiAlternatives(subject, message, from_email, [self.email])
# msg.attach_alternative(kwargs["html_message"],"text/html")
# msg.send()
send_mail(subject, message, from_email, [self.email], **kwargs)
except (PMMailInactiveRecipientException, PMMailUnauthorizedException) as e:
logging.error(str(e))
def clean(self):
super(User, self).clean()
self.email = self.__class__.objects.normalize_email(self.email)
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"""
Returns the short name for the user.
"""
return self.first_name
def get_token(self):
token, _ = Token.objects.get_or_create(user=self)
return token.key
def save(self, *args, **kwargs):
from contentcuration.utils.user import calculate_user_storage
super(User, self).save(*args, **kwargs)
if 'disk_space' in self._field_updates.changed():
calculate_user_storage(self.pk)
changed = False
if not self.content_defaults:
self.content_defaults = DEFAULT_CONTENT_DEFAULTS
changed = True
if not self.clipboard_tree:
self.clipboard_tree = ContentNode.objects.create(title=self.email + " clipboard", kind_id=content_kinds.TOPIC)
self.clipboard_tree.save()
changed = True
if changed:
self.save()
class Meta:
verbose_name = "User"
verbose_name_plural = "Users"
indexes = [
UniqueActiveUserIndex(Lower('email'), condition=Q(is_active=True), name="contentcura_email_d4d492_idx")
]
@classmethod
def filter_view_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
# all shared editors
all_editable = User.editable_channels.through.objects.all()
editable = all_editable.filter(
channel_id__in=all_editable.filter(user_id=user.pk).values_list("channel_id", flat=True)
)
# all shared viewers
all_view_only = User.view_only_channels.through.objects.all()
view_only = all_view_only.filter(
channel_id__in=all_view_only.filter(user_id=user.pk).values_list("channel_id", flat=True)
)
return queryset.filter(
Q(pk=user.pk)
| Q(pk__in=editable.values_list("user_id", flat=True))
| Q(pk__in=view_only.values_list("user_id", flat=True))
)
@classmethod
def filter_edit_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
return queryset.filter(pk=user.pk)
@classmethod
def get_for_email(cls, email, deleted=False, **filters):
"""
Returns the appropriate User record given an email, ordered by:
- those with is_active=True first, which there should only ever be one
        - otherwise by ID DESC so the most recent inactive record should be returned
Filters out deleted User records by default. To include both deleted and
undeleted user records pass None to the deleted argument.
:param email: A string of the user's email
:param filters: Additional filters to filter the User queryset
:return: User or None
"""
user_qs = User.objects.filter(email__iexact=email.strip())
if deleted is not None:
user_qs = user_qs.filter(deleted=deleted)
return user_qs.filter(**filters).order_by("-is_active", "-id").first()
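    # Usage sketch (hypothetical email):
    # user = User.get_for_email("[email protected]")                # non-deleted records only
    # user = User.get_for_email("[email protected]", deleted=None)  # include soft-deleted records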
class UUIDField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 32
super(UUIDField, self).__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, uuid.UUID):
return value.hex
return value
def get_default(self):
result = super(UUIDField, self).get_default()
if isinstance(result, uuid.UUID):
result = result.hex
return result
def to_python(self, value):
if isinstance(value, uuid.UUID):
return value.hex
return value
class MPTTTreeIDManager(models.Model):
"""
Because MPTT uses plain integers for tree IDs and does not use an auto-incrementing field for them,
the same ID can sometimes be assigned to two trees if two channel create ops happen concurrently.
As we are using this table only for the ID generation, it does not need any fields.
We resolve this by creating a dummy table and using its ID as the tree index to take advantage of the db's
concurrency-friendly way of generating sequential integer IDs. There is a custom migration that ensures
that the number of records (and thus id) matches the max tree ID number when this table gets added.
"""
def file_on_disk_name(instance, filename):
"""
    Create a namespaced file path from the File object's checksum property.
This path will be used to store the content copy
:param instance: File (content File model)
:param filename: str
:return: str
"""
return generate_file_on_disk_name(instance.checksum, filename)
def generate_file_on_disk_name(checksum, filename):
""" Separated from file_on_disk_name to allow for simple way to check if has already exists """
h = checksum
basename, ext = os.path.splitext(filename)
directory = os.path.join(settings.STORAGE_ROOT, h[0], h[1])
if not os.path.exists(directory):
os.makedirs(directory)
return os.path.join(directory, h + ext.lower())
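# Illustrative example (hypothetical checksum, assuming settings.STORAGE_ROOT == "/storage"):
# generate_file_on_disk_name("abcd1234", "video.MP4")
# -> "/storage/a/b/abcd1234.mp4"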
def object_storage_name(instance, filename):
"""
    Create a namespaced file path from the File object's checksum property.
This path will be used to store the content copy
:param instance: File (content File model)
:param filename: str
:return: str
"""
default_ext = ''
if instance.file_format_id:
default_ext = '.{}'.format(instance.file_format_id)
return generate_object_storage_name(instance.checksum, filename, default_ext)
def generate_object_storage_name(checksum, filename, default_ext=''):
""" Separated from file_on_disk_name to allow for simple way to check if has already exists """
h = checksum
basename, actual_ext = os.path.splitext(filename)
ext = actual_ext if actual_ext else default_ext
# Use / instead of os.path.join as Windows makes this \\
directory = "/".join([settings.STORAGE_ROOT, h[0], h[1]])
return os.path.join(directory, h + ext.lower())
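# Illustrative example (hypothetical checksum, assuming settings.STORAGE_ROOT == "storage"):
# generate_object_storage_name("abcd1234", "document", default_ext=".pdf")
# -> "storage/a/b/abcd1234.pdf" (default_ext applies because "document" has no extension)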
def generate_storage_url(filename, request=None, *args):
"""
Generate a storage URL for the given content filename.
"""
path = generate_object_storage_name(os.path.splitext(filename)[0], filename)
    # There are a few scenarios where Studio might be run:
#
# 1. In normal kubernetes, nginx will proxy for us. We'll know we're in kubernetes when the
# environment variable RUN_MODE=k8s
#
# 2. In Docker Compose and bare metal runserver, we'll be running in runserver, and minio
# will be exposed in port 9000 in the host's localhost network.
# Note (aron): returning the true storage URL (e.g. https://storage.googleapis.com/storage/a.mp4)
# isn't too important, because we have CDN in front of our servers, so it should be cached.
# But change the logic here in case there is a potential for bandwidth and latency improvement.
# Detect our current state first
run_mode = os.getenv("RUN_MODE")
# if we're running inside k8s, then just serve the normal /content/{storage,databases} URL,
# and let nginx handle proper proxying.
if run_mode == "k8s":
url = "/content/{path}".format(
path=path,
)
# if we're in docker-compose or in baremetal, just return the object storage URL as localhost:9000
elif run_mode == "docker-compose" or run_mode is None:
# generate the minio storage URL, so we can get the GET parameters that give everyone
# access even if they don't need to log in
params = urllib.parse.urlparse(default_storage.url(path)).query
host = "localhost"
port = 9000  # hardcoded to the default minio port
url = "http://{host}:{port}/{bucket}/{path}?{params}".format(
host=host,
port=port,
bucket=settings.AWS_S3_BUCKET_NAME,
path=path,
params=params,
)
return url
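# Illustrative outputs (paths shortened): with RUN_MODE=k8s this returns a
# relative URL such as '/content/storage/a/b/abcd1234.mp4' for nginx to
# proxy; with RUN_MODE unset or 'docker-compose' it returns something like
# 'http://localhost:9000/<bucket>/storage/a/b/abcd1234.mp4?<signed params>'
# pointing straight at the local minio instance.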
class FileOnDiskStorage(FileSystemStorage):
"""
Overrides FileSystemStorage's default save method to ignore duplicate files.
"""
def get_available_name(self, name):
return name
def _save(self, name, content):
if self.exists(name):
# if the file exists, do not call the superclass's _save method
logging.warning('Content copy "%s" already exists!', name)
return name
return super(FileOnDiskStorage, self)._save(name, content)
class SecretToken(models.Model):
"""Tokens for channels"""
token = models.CharField(max_length=100, unique=True)
is_primary = models.BooleanField(default=False)
@classmethod
def exists(cls, token):
"""
Return True if the given token string already exists,
False otherwise.
"""
return cls.objects.filter(token=token).exists()
@classmethod
def generate_new_token(cls):
"""
Creates a primary secret token for the current channel using a proquint
string. Creates a secondary token containing the channel id.
These tokens can be used to refer to the channel to download its content
database.
"""
# Try up to TRIALS times to generate a unique token.
TRIALS = 100
for __ in range(TRIALS):
token = proquint.generate()
if SecretToken.exists(token):
continue
break
# If after TRIALS attempts we still don't have a unique token,
# raise an error.
# See https://stackoverflow.com/a/9980160 on what a for-else loop does.
else:
raise ValueError("Cannot generate new token")
# We found a unique token! Return it
return token
def __str__(self):
return "{}-{}".format(self.token[:5], self.token[5:])
def get_channel_thumbnail(channel):
if not isinstance(channel, dict):
channel = channel.__dict__
if channel.get("thumbnail_encoding"):
thumbnail_data = channel.get("thumbnail_encoding")
if thumbnail_data.get("base64"):
return thumbnail_data["base64"]
if channel.get("thumbnail") and'static' not in channel.get("thumbnail"):
return generate_storage_url(channel.get("thumbnail"))
return '/static/img/kolibri_placeholder.png'
CHANNEL_NAME_INDEX_NAME = "channel_name_idx"
# A list of all the FKs from Channel object
# to ContentNode trees
# used for permissions filtering
CHANNEL_TREES = (
"main_tree",
"chef_tree",
"trash_tree",
"staging_tree",
"previous_tree",
)
def boolean_val(val):
return Value(val, output_field=models.BooleanField())
class PermissionCTE(With):
tree_id_fields = [
"channel__{}__tree_id".format(tree_name)
for tree_name in CHANNEL_TREES
]
def __init__(self, model, user_id, **kwargs):
queryset = model.objects.filter(user_id=user_id)\
.annotate(
tree_id=Unnest(ArrayRemove(Array(*self.tree_id_fields), None), output_field=models.IntegerField())
)
super(PermissionCTE, self).__init__(queryset=queryset.values("user_id", "channel_id", "tree_id"), **kwargs)
@classmethod
def editable_channels(cls, user_id):
return PermissionCTE(User.editable_channels.through, user_id, name="editable_channels_cte")
@classmethod
def view_only_channels(cls, user_id):
return PermissionCTE(User.view_only_channels.through, user_id, name="view_only_channels_cte")
def exists(self, *filters):
return Exists(self.queryset().filter(*filters).values("user_id"))
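# Hypothetical usage sketch, assuming the django-cte ``With`` API that
# PermissionCTE inherits (a queryset joined against the CTE's columns):
#
#   cte = PermissionCTE.editable_channels(user_id)
#   editable = cte.join(Channel.objects.all(), id=cte.col.channel_id)\
#       .with_cte(cte)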
class Channel(models.Model):
""" Permissions come from association with organizations """
id = UUIDField(primary_key=True, default=uuid.uuid4)
name = models.CharField(max_length=200, blank=True)
description = models.CharField(max_length=400, blank=True)
tagline = models.CharField(max_length=150, blank=True, null=True)
version = models.IntegerField(default=0)
thumbnail = models.TextField(blank=True, null=True)
thumbnail_encoding = JSONField(default=dict)
editors = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='editable_channels',
verbose_name="editors",
help_text="Users with edit rights",
blank=True,
)
viewers = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='view_only_channels',
verbose_name="viewers",
help_text="Users with view only rights",
blank=True,
)
language = models.ForeignKey('Language', null=True, blank=True, related_name='channel_language', on_delete=models.SET_NULL)
trash_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_trash', on_delete=models.SET_NULL)
clipboard_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_clipboard', on_delete=models.SET_NULL)
main_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_main', on_delete=models.SET_NULL)
staging_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_staging', on_delete=models.SET_NULL)
chef_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_chef', on_delete=models.SET_NULL)
previous_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_previous', on_delete=models.SET_NULL)
bookmarked_by = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='bookmarked_channels',
verbose_name="bookmarked by",
)
deleted = models.BooleanField(default=False, db_index=True)
public = models.BooleanField(default=False, db_index=True)
preferences = models.TextField(default=DEFAULT_USER_PREFERENCES)
content_defaults = JSONField(default=dict)
priority = models.IntegerField(default=0, help_text="Order to display public channels")
last_published = models.DateTimeField(blank=True, null=True)
secret_tokens = models.ManyToManyField(
SecretToken,
related_name='channels',
verbose_name="secret tokens",
blank=True,
)
source_url = models.CharField(max_length=200, blank=True, null=True)
demo_server_url = models.CharField(max_length=200, blank=True, null=True)
# Fields specific to content generated by Ricecooker
source_id = models.CharField(max_length=200, blank=True, null=True)
source_domain = models.CharField(max_length=300, blank=True, null=True)
ricecooker_version = models.CharField(max_length=100, blank=True, null=True)
# Fields to calculate when channel is published
published_data = JSONField(default=dict)
icon_encoding = models.TextField(blank=True, null=True)
total_resource_count = models.IntegerField(default=0)
published_kind_count = models.TextField(blank=True, null=True)
published_size = models.FloatField(default=0)
included_languages = models.ManyToManyField(
"Language",
related_name='channels',
verbose_name="languages",
blank=True,
)
_field_updates = FieldTracker(fields=[
# Field to watch for changes
"description",
"language_id",
"thumbnail",
"name",
"thumbnail_encoding",
# watch these fields for changes
# but exclude them from setting changed
# on the main tree
"deleted",
"public",
"main_tree_id",
"version",
])
@classmethod
def get_editable(cls, user, channel_id):
return cls.filter_edit_queryset(cls.objects.all(), user).get(id=channel_id)
@classmethod
def filter_edit_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
# anonymous users have no editable channels, so return an empty queryset
if not user_id:
return queryset.none()
edit = Exists(User.editable_channels.through.objects.filter(user_id=user_id, channel_id=OuterRef("id")))
queryset = queryset.annotate(edit=edit)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
user_email = not user.is_anonymous and user.email
if user_id:
filters = dict(user_id=user_id, channel_id=OuterRef("id"))
edit = Exists(User.editable_channels.through.objects.filter(**filters).values("user_id"))
view = Exists(User.view_only_channels.through.objects.filter(**filters).values("user_id"))
else:
edit = boolean_val(False)
view = boolean_val(False)
queryset = queryset.annotate(
edit=edit,
view=view,
)
if user_id and user.is_admin:
return queryset
permission_filter = Q()
if user_id:
pending_channels = Invitation.objects.filter(email=user_email, revoked=False, declined=False, accepted=False).values_list(
"channel_id", flat=True
)
permission_filter = (
Q(view=True) | Q(edit=True) | Q(deleted=False, id__in=pending_channels)
)
return queryset.filter(permission_filter | Q(deleted=False, public=True))
@classmethod
def get_all_channels(cls):
return cls.objects.select_related('main_tree').prefetch_related('editors', 'viewers').distinct()
def resource_size_key(self):
return "{}_resource_size".format(self.pk)
# Might be good to display resource size, but need to improve query time first
def get_resource_size(self):
cached_data = cache.get(self.resource_size_key())
if cached_data:
return cached_data
tree_id = self.main_tree.tree_id
files = File.objects.select_related('contentnode', 'assessment_item')\
.filter(contentnode__tree_id=tree_id)\
.values('checksum', 'file_size')\
.distinct()\
.aggregate(resource_size=Sum('file_size'))
cache.set(self.resource_size_key(), files['resource_size'] or 0, None)
return files['resource_size'] or 0
def on_create(self):
record_channel_stats(self, None)
if not self.content_defaults:
self.content_defaults = DEFAULT_CONTENT_DEFAULTS
if not self.main_tree:
self.main_tree = ContentNode.objects.create(
title=self.name,
kind_id=content_kinds.TOPIC,
content_id=self.id,
node_id=self.id,
original_channel_id=self.id,
source_channel_id=self.id,
changed=True,
complete=True,
)
# Ensure that locust or unit tests raise if there are any concurrency issues with tree ids.
if settings.DEBUG:
if ContentNode.objects.filter(parent=None, tree_id=self.main_tree.tree_id).count() != 1:
raise AssertionError
if not self.trash_tree:
self.trash_tree = ContentNode.objects.create(
title=self.name,
kind_id=content_kinds.TOPIC,
content_id=self.id,
node_id=self.id,
)
# if this change affects the published channel list, clear the channel cache
if self.public and (self.main_tree and self.main_tree.published):
delete_public_channel_cache_keys()
def on_update(self):
from contentcuration.utils.user import calculate_user_storage
original_values = self._field_updates.changed()
record_channel_stats(self, original_values)
blacklist = set([
"public",
"main_tree_id",
"version",
])
if self.main_tree and original_values and any((True for field in original_values if field not in blacklist)):
# Changing channel metadata should also mark main_tree as changed
self.main_tree.changed = True
# Check if original thumbnail is no longer referenced
if "thumbnail" in original_values and original_values["thumbnail"] and'static' not in original_values["thumbnail"]:
filename, ext = os.path.splitext(original_values["thumbnail"])
delete_empty_file_reference(filename, ext[1:])
# Refresh storage for all editors on the channel
if "deleted" in original_values:
for editor in self.editors.all():
calculate_user_storage(editor.pk)
# Delete db if channel has been deleted and mark as unpublished
if "deleted" in original_values and not original_values["deleted"]:
self.pending_editors.all().delete()
export_db_storage_path = os.path.join(settings.DB_ROOT, "{channel_id}.sqlite3".format(channel_id=self.id))
if default_storage.exists(export_db_storage_path):
default_storage.delete(export_db_storage_path)
if self.main_tree:
self.main_tree.published = False
if self.main_tree and self.main_tree._field_updates.changed():
self.main_tree.save()
# if this change affects the published channel list, clear the channel cache
if "public" in original_values and (self.main_tree and self.main_tree.published):
delete_public_channel_cache_keys()
def save(self, *args, **kwargs):
if self._state.adding:
self.on_create()
else:
self.on_update()
super(Channel, self).save(*args, **kwargs)
def get_thumbnail(self):
return get_channel_thumbnail(self)
def has_changes(self):
return self.main_tree.get_descendants(include_self=True).filter(changed=True).exists()
def get_date_modified(self):
return self.main_tree.get_descendants(include_self=True).aggregate(last_modified=Max('modified'))['last_modified']
def get_resource_count(self):
return self.main_tree.get_descendants().exclude(kind_id=content_kinds.TOPIC).order_by('content_id').distinct('content_id').count()
def get_human_token(self):
return self.secret_tokens.get(is_primary=True)
def get_channel_id_token(self):
return self.secret_tokens.get(token=self.id)
def make_token(self):
token = self.secret_tokens.create(token=SecretToken.generate_new_token(), is_primary=True)
self.secret_tokens.get_or_create(token=self.id)
return token
def make_public(self, bypass_signals=False):
"""
Sets the current channel object to be public and viewable by anyone.
If bypass_signals is True, update the model in such a way that we
prevent any model signals from running due to the update.
Returns the same channel object.
"""
if bypass_signals:
self.public = True # set this attribute still, so the object will be updated
Channel.objects.filter(id=self.id).update(public=True)
# clear the channel cache
delete_public_channel_cache_keys()
else:
self.public = True
self.save()
return self
def mark_created(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.CREATION)
def mark_publishing(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.PUBLICATION)
self.main_tree.publishing = True
self.main_tree.save()
def mark_deleted(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.DELETION)
self.deleted = True
self.save()
def mark_recovered(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.RECOVERY)
self.deleted = False
self.save()
@property
def deletion_history(self):
return self.history.filter(action=channel_history.DELETION)
@property
def publishing_history(self):
return self.history.filter(action=channel_history.PUBLICATION)
@classmethod
def get_public_channels(cls, defer_nonmain_trees=False):
"""
Get all public channels.
If defer_nonmain_trees is True, defer the loading of all
trees except for the main_tree."""
if defer_nonmain_trees:
c = (Channel.objects
.filter(public=True)
.exclude(deleted=True)
.select_related('main_tree')
.prefetch_related('editors')
.defer('trash_tree', 'clipboard_tree', 'staging_tree', 'chef_tree', 'previous_tree', 'viewers'))
else:
c = Channel.objects.filter(public=True).exclude(deleted=True)
return c
class Meta:
verbose_name = "Channel"
verbose_name_plural = "Channels"
indexes = [
models.Index(fields=["name"], name=CHANNEL_NAME_INDEX_NAME),
]
index_together = [
["deleted", "public"]
]
CHANNEL_HISTORY_CHANNEL_INDEX_NAME = "idx_channel_history_channel_id"
class ChannelHistory(models.Model):
"""
Model for tracking certain actions performed on a channel
"""
channel = models.ForeignKey('Channel', null=False, blank=False, related_name='history', on_delete=models.CASCADE)
actor = models.ForeignKey('User', null=False, blank=False, related_name='channel_history', on_delete=models.CASCADE)
performed = models.DateTimeField(default=timezone.now)
action = models.CharField(max_length=50, choices=channel_history.choices)
@classmethod
def prune(cls):
"""
Prunes history records by keeping the most recent actions for each channel and type,
and deleting all other older actions
"""
keep_ids = cls.objects.distinct("channel_id", "action").order_by("channel_id", "action", "-performed").values_list("id", flat=True)
cls.objects.exclude(id__in=keep_ids).delete()
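# How the pruning query works (Postgres-specific): DISTINCT ON
# ("channel_id", "action") combined with the ORDER BY keeps exactly one id
# per (channel, action) pair -- the row with the latest "performed" -- and
# every row outside that id set is deleted. Roughly:
#
#   SELECT DISTINCT ON (channel_id, action) id
#   FROM channel_history  -- table name illustrative
#   ORDER BY channel_id, action, performed DESC;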
class Meta:
verbose_name = "Channel history"
verbose_name_plural = "Channel histories"
indexes = [
models.Index(fields=["channel_id"], name=CHANNEL_HISTORY_CHANNEL_INDEX_NAME),
]
class UserHistory(models.Model):
"""
Model that stores the user's action history.
"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=False, blank=False, related_name="history", on_delete=models.CASCADE)
action = models.CharField(max_length=32, choices=user_history.choices)
performed_at = models.DateTimeField(default=timezone.now)
class ChannelSet(models.Model):
# NOTE: this is referred to as "channel collections" on the front-end, but we need to call it
# something else as there is already a ChannelCollection model on the front-end
id = UUIDField(primary_key=True, default=uuid.uuid4)
name = models.CharField(max_length=200, blank=True)
description = models.CharField(max_length=400, blank=True)
public = models.BooleanField(default=False, db_index=True)
editors = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='channel_sets',
verbose_name="editors",
help_text="Users with edit rights",
blank=True,
)
secret_token = models.ForeignKey('SecretToken', null=True, blank=True, related_name='channel_sets', on_delete=models.SET_NULL)
@classmethod
def filter_edit_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
user_id = not user.is_anonymous and user.id
edit = Exists(User.channel_sets.through.objects.filter(user_id=user_id, channelset_id=OuterRef("id")))
queryset = queryset.annotate(edit=edit)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
return cls.filter_edit_queryset(queryset, user)
def get_channels(self):
if self.secret_token:
return self.secret_token.channels.filter(deleted=False)
def save(self, *args, **kwargs):
if self._state.adding:
self.on_create()
super(ChannelSet, self).save(*args, **kwargs)
def on_create(self):
if not self.secret_token:
self.secret_token = SecretToken.objects.create(token=SecretToken.generate_new_token())
def delete(self, *args, **kwargs):
super(ChannelSet, self).delete(*args, **kwargs)
if self.secret_token:
self.secret_token.delete()
class ContentTag(models.Model):
id = UUIDField(primary_key=True, default=uuid.uuid4)
tag_name = models.CharField(max_length=50)
channel = models.ForeignKey('Channel', related_name='tags', blank=True, null=True, db_index=True, on_delete=models.SET_NULL)
objects = CustomManager()
def __str__(self):
return self.tag_name
class Meta:
unique_together = ['tag_name', 'channel']
class License(models.Model):
"""
Normalize the license of ContentNode model
"""
license_name = models.CharField(max_length=50)
license_url = models.URLField(blank=True)
license_description = models.TextField(blank=True)
copyright_holder_required = models.BooleanField(default=True)
is_custom = models.BooleanField(default=False)
exists = models.BooleanField(
default=False,
verbose_name="license exists",
help_text="Tells whether or not a content item is licensed to share",
)
@classmethod
def validate_name(cls, name):
if not cls.objects.filter(license_name=name).exists():
raise ValidationError('License `{}` does not exist'.format(name))
def __str__(self):
return self.license_name
NODE_ID_INDEX_NAME = "node_id_idx"
NODE_MODIFIED_INDEX_NAME = "node_modified_idx"
NODE_MODIFIED_DESC_INDEX_NAME = "node_modified_desc_idx"
CONTENTNODE_TREE_ID_CACHE_KEY = "contentnode_{pk}__tree_id"
class ContentNode(MPTTModel, models.Model):
"""
By default, all nodes have a title and can be used as a topic.
"""
# Random id used internally on Studio (See `node_id` for id used in Kolibri)
id = UUIDField(primary_key=True, default=uuid.uuid4)
# the content_id is used for tracking a user's interaction with a piece of
# content, in the face of possibly many copies of that content. When a user
# interacts with a piece of content, all substantially similar pieces of
# content should be marked as such as well. We track these "substantially
# similar" types of content by having them have the same content_id.
content_id = UUIDField(primary_key=False, default=uuid.uuid4, editable=False, db_index=True)
# Note this field is indexed, but we are using the Index API to give it an explicit name, see the model Meta
node_id = UUIDField(primary_key=False, default=uuid.uuid4, editable=False)
# TODO: disallow nulls once existing models have been set
original_channel_id = UUIDField(primary_key=False, editable=False, null=True,
db_index=True) # Original channel copied from
source_channel_id = UUIDField(primary_key=False, editable=False, null=True) # Immediate channel copied from
# Original node_id of node copied from (TODO: original_node_id clashes with original_node field - temporary)
original_source_node_id = UUIDField(primary_key=False, editable=False, null=True,
db_index=True)
source_node_id = UUIDField(primary_key=False, editable=False, null=True) # Immediate node_id of node copied from
# Fields specific to content generated by Ricecooker
source_id = models.CharField(max_length=200, blank=True, null=True)
source_domain = models.CharField(max_length=300, blank=True, null=True)
title = models.CharField(max_length=200, blank=True)
description = models.TextField(blank=True)
kind = models.ForeignKey('ContentKind', related_name='contentnodes', db_index=True, null=True, blank=True, on_delete=models.SET_NULL)
license = models.ForeignKey('License', null=True, blank=True, on_delete=models.SET_NULL)
license_description = models.CharField(max_length=400, null=True, blank=True)
prerequisite = models.ManyToManyField('self', related_name='is_prerequisite_of',
through='PrerequisiteContentRelationship', symmetrical=False, blank=True)
is_related = models.ManyToManyField('self', related_name='relate_to', through='RelatedContentRelationship', symmetrical=False, blank=True)
import ROOT
from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection
from PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module
from TreeProducer import *
from TreeProducerCommon import *
from CorrectionTools.PileupWeightTool import *
from CorrectionTools.BTaggingTool import BTagWeightTool, BTagWPs
from CorrectionTools.MuonSFs import *
from CorrectionTools.ElectronSFs import *
from CorrectionTools.RecoilCorrectionTool import getTTptWeight, getTTPt
from CorrectionTools.DYCorrection import *
import struct
import numpy as np
class LLProducer(Module):
def __init__(self, name, DataType, filelist, **kwargs):
self.name = name
self.out = TreeProducer(name)
self.sample = filelist
if DataType=='data':
self.isData = True
self.isMC = False
else:
self.isData = False
self.isMC = True
self.year = kwargs.get('year', 2017 )
self.tes = kwargs.get('tes', 1.0 )
self.ltf = kwargs.get('ltf', 1.0 )
self.jtf = kwargs.get('jtf', 1.0 )
year = self.year
self.filter = getMETFilters(year,self.isData)
if not self.isData:
self.muSFs = MuonSFs(year=year)
self.elSFs = ElectronSFs(year=year)
self.puTool = PileupWeightTool(year =year)
self.btagToolAK8_deep = BTagWeightTool('DeepCSV','AK8','loose',sigma='central',channel='ll',year=year)
self.btagToolAK8_deep_up = BTagWeightTool('DeepCSV','AK8','loose',sigma='up',channel='ll',year=year)
self.btagToolAK8_deep_down = BTagWeightTool('DeepCSV','AK8','loose',sigma='down',channel='ll',year=year)
self.btagToolAK4_deep = BTagWeightTool('DeepCSV','AK4','loose',sigma='central',channel='ll',year=year)
self.btagToolAK4_deep_up = BTagWeightTool('DeepCSV','AK4','loose',sigma='up',channel='ll',year=year)
self.btagToolAK4_deep_down = BTagWeightTool('DeepCSV','AK4','loose',sigma='down',channel='ll',year=year)
if 'DYJetsToLL' in self.sample[0]:
self.DYCorr = DYCorrection('DYJetsToLL')
elif 'ZJetsToNuNu' in self.sample[0]:
self.DYCorr = DYCorrection('ZJetsToNuNu')
elif 'WJetsToLNu' in self.sample[0]:
self.DYCorr = DYCorrection('WJetsToLNu')
self.runJEC = False
JEC_samples = ['Zprime','WWTo','WZTo','ZZTo','GluGluHToBB','ZH_HToBB','Wplus','Wminus']
for JEC_sample in JEC_samples:
# use substring membership: str.find() returns 0 for a match at the start,
# which the previous ">0" comparison would have missed
if JEC_sample in self.sample[0]:
self.runJEC = True
def beginJob(self):
pass
def endJob(self):
if not self.isData:
self.btagToolAK8_deep.setDirectory(self.out.outputfile,'AK8btag_deep')
self.btagToolAK4_deep.setDirectory(self.out.outputfile,'AK4btag_deep')
self.out.outputfile.Write()
self.out.outputfile.Close()
def beginFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
pass
def endFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
pass
def fillBranches(self,event):
self.out.isMC[0] = self.isMC
self.out.is2016[0] = self.is2016
self.out.is2017[0] = self.is2017
self.out.is2018[0] = self.is2018
self.out.EventNumber[0] = event.event
self.out.LumiNumber[0] = event.luminosityBlock
self.out.RunNumber[0] = event.run
self.out.EventWeight[0] = self.EventWeight
self.out.TopWeight[0] = self.TopWeight
self.out.BTagAK8Weight[0] = self.BTagAK8Weight
self.out.BTagAK4Weight[0] = self.BTagAK4Weight
self.out.BTagAK8Weight_deep[0] = self.BTagAK8Weight_deep
self.out.BTagAK8Weight_deep_up[0] = self.BTagAK8Weight_deep_up
self.out.BTagAK8Weight_deep_down[0] = self.BTagAK8Weight_deep_down
self.out.BTagAK4Weight_deep[0] = self.BTagAK4Weight_deep
self.out.BTagAK4Weight_deep_up[0] = self.BTagAK4Weight_deep_up
self.out.BTagAK4Weight_deep_down[0] = self.BTagAK4Weight_deep_down
self.out.BBTagWeight[0] = self.BBTagWeight
self.out.GenWeight[0] = self.GenWeight
self.out.PUWeight[0] = self.PUWeight
self.out.LeptonWeight[0] = self.LeptonWeight
self.out.LeptonWeightUp[0] = self.LeptonWeightUp
self.out.LeptonWeightDown[0] = self.LeptonWeightDown
self.out.TriggerWeight[0] = self.TriggerWeight
self.out.TriggerWeightUp[0] = self.TriggerWeightUp
self.out.TriggerWeightDown[0] = self.TriggerWeightDown
self.out.QCDNLO_Corr[0] = self.QCDNLO_Corr
self.out.QCDNNLO_Corr[0] = self.QCDNNLO_Corr
self.out.EWKNLO_Corr[0] = self.EWKNLO_Corr
self.out.isZtoNN[0] = self.isZtoNN
self.out.isZtoEE[0] = self.isZtoEE
self.out.isZtoMM[0] = self.isZtoMM
self.out.isTtoEM[0] = self.isTtoEM
self.out.isBoosted4B[0] = self.isBoosted4B
self.out.isHtobb[0] = self.isHtobb
self.out.isHtobb_ml[0] = self.isHtobb_ml
self.out.isMaxBTag_loose[0] = self.isMaxBTag_loose
self.out.isMaxBTag_medium[0] = self.isMaxBTag_medium
self.out.isMaxBTag_tight[0] = self.isMaxBTag_tight
self.out.isVBF[0] = self.isVBF
self.out.nPV[0] = event.PV_npvsGood
self.out.nTaus[0] = self.nTaus
self.out.nElectrons[0] = self.nElectrons
self.out.nMuons[0] = self.nMuons
self.out.nJets[0] = self.nJetsNoFatJet
self.out.nFatJets[0] = self.nFatJets
self.out.DPhi[0] = self.DPhi
self.out.DEta[0] = self.VHDEta
self.out.MinDPhi[0] = self.MinJetMetDPhi
self.out.MaxBTag[0] = self.MaxJetNoFatJetBTag
self.out.BtagDeepB[0] = self.BtagDeepB
self.out.DeepTagMD_H4qvsQCD[0] = self.DeepTagMD_H4qvsQCD
self.out.DeepTagMD_HbbvsQCD[0] = self.DeepTagMD_HbbvsQCD
self.out.DeepTagMD_ZHbbvsQCD[0] = self.DeepTagMD_ZHbbvsQCD
self.out.DeepTagMD_ZbbvsQCD[0] = self.DeepTagMD_ZbbvsQCD
self.out.DeepTagMD_bbvsLight[0] = self.DeepTagMD_bbvsLight
self.out.DeepTagMD_WvsQCD[0] = self.DeepTagMD_WvsQCD
self.out.DeepTagMD_ZvsQCD[0] = self.DeepTagMD_ZvsQCD
self.out.Mu1_pt[0] = self.Mu1_pt
self.out.Mu1_eta[0] = self.Mu1_eta
self.out.Mu1_phi[0] = self.Mu1_phi
self.out.Mu1_mass[0] = self.Mu1_mass
self.out.Mu1_pfIsoId[0] = self.Mu1_pfIsoId
self.out.Mu1_relIso[0] = self.Mu1_relIso
self.out.Mu1_highPtId[0] = self.Mu1_highPtId
self.out.Mu2_pt[0] = self.Mu2_pt
self.out.Mu2_eta[0] = self.Mu2_eta
self.out.Mu2_phi[0] = self.Mu2_phi
self.out.Mu2_mass[0] = self.Mu2_mass
self.out.Mu2_pfIsoId[0] = self.Mu2_pfIsoId
self.out.Mu2_relIso[0] = self.Mu2_relIso
self.out.Mu2_highPtId[0] = self.Mu2_highPtId
self.out.Ele1_pt[0] = self.Ele1_pt
self.out.Ele1_eta[0] = self.Ele1_eta
self.out.Ele1_phi[0] = self.Ele1_phi
self.out.Ele1_mass[0] = self.Ele1_mass
self.out.Ele2_pt[0] = self.Ele2_pt
self.out.Ele2_eta[0] = self.Ele2_eta
self.out.Ele2_phi[0] = self.Ele2_phi
self.out.Ele2_mass[0] = self.Ele2_mass
self.out.Ele_HEM15_16[0] = self.Ele_HEM15_16
self.out.Jet1_VBF_pt[0] = self.Jet1_VBF_pt
self.out.Jet1_VBF_eta[0] = self.Jet1_VBF_eta
self.out.Jet1_VBF_phi[0] = self.Jet1_VBF_phi
self.out.Jet1_VBF_mass[0] = self.Jet1_VBF_mass
self.out.Jet2_VBF_pt[0] = self.Jet2_VBF_pt
self.out.Jet2_VBF_eta[0] = self.Jet2_VBF_eta
self.out.Jet2_VBF_phi[0] = self.Jet2_VBF_phi
self.out.Jet2_VBF_mass[0] = self.Jet2_VBF_mass
self.out.dijet_VBF_mass[0] = self.dijet_VBF_mass
self.out.deltaR_VBF[0] = self.deltaR_VBF
self.out.deltaR_HVBFjet1[0] = self.deltaR_HVBFjet1
self.out.deltaR_HVBFjet2[0] = self.deltaR_HVBFjet2
self.out.MET[0] = event.PuppiMET_pt
self.out.MET_chs[0] = event.MET_pt
self.out.HT_HEM15_16[0] = self.HT_HEM15_16
self.out.LHEScaleWeight = self.LHEScaleWeight
self.out.LHEPdfWeight = self.LHEPdfWeight
self.out.LHEWeight_originalXWGTUP[0]= self.LHEWeight_originalXWGTUP
self.out.PrefireWeight[0] = self.PrefireWeight
self.out.PrefireWeightUp[0] = self.PrefireWeightUp
self.out.PrefireWeightDown[0] = self.PrefireWeightDown
self.out.HT[0] = self.HT
self.out.H_pt[0] = self.H_pt
self.out.H_eta[0] = self.H_eta
self.out.H_phi[0] = self.H_phi
self.out.H_mass[0] = self.H_mass
self.out.H_M[0] = self.H_M
self.out.H_tau21[0] = self.H_tau21
self.out.H_tau41[0] = self.H_tau41
self.out.H_tau42[0] = self.H_tau42
self.out.H_tau31[0] = self.H_tau31
self.out.H_tau32[0] = self.H_tau32
self.out.H_ddt[0] = self.H_ddt
self.out.H_csv1[0] = self.H_csv1
self.out.H_csv2[0] = self.H_csv2
self.out.H_deepcsv1[0] = self.H_deepcsv1
self.out.H_deepcsv2[0] = self.H_deepcsv2
self.out.H_dbt[0] = self.H_dbt
self.out.H_hadronflavour[0] = self.H_hadronflavour
self.out.H_partonflavour[0] = self.H_partonflavour
self.out.H_chf[0] = self.H_chf
self.out.H_nhf[0] = self.H_nhf
self.out.V_pt[0] = self.V_pt
self.out.V_eta[0] = self.V_eta
self.out.V_phi[0] = self.V_phi
self.out.V_mass[0] = self.V_mass
self.out.VH_deltaR[0] = self.VH_deltaR
self.out.X_pt[0] = self.X_pt
self.out.X_eta[0] = self.X_eta
self.out.X_phi[0] = self.X_phi
self.out.X_mass[0] = self.X_mass
self.out.X_mass_chs[0] = self.X_mass_chs
self.out.X_mass_nom[0] = self.X_mass_nom
self.out.X_mass_jesUp[0] = self.X_mass_jesUp
self.out.X_mass_jesDown[0] = self.X_mass_jesDown
self.out.X_mass_jerUp[0] = self.X_mass_jerUp
self.out.X_mass_jerDown[0] = self.X_mass_jerDown
self.out.X_mass_MET_nom[0] = self.X_mass_MET_nom
self.out.X_mass_MET_jesUp[0] = self.X_mass_MET_jesUp
self.out.X_mass_MET_jesDown[0] = self.X_mass_MET_jesDown
self.out.X_mass_MET_jerUp[0] = self.X_mass_MET_jerUp
self.out.X_mass_MET_jerDown[0] = self.X_mass_MET_jerDown
self.out.H_mass_nom[0] = self.H_mass_nom
self.out.H_mass_jmsUp[0] = self.H_mass_jmsUp
self.out.H_mass_jmsDown[0] = self.H_mass_jmsDown
self.out.H_mass_jmrUp[0] = self.H_mass_jmrUp
self.out.H_mass_jmrDown[0] = self.H_mass_jmrDown
self.out.tree.Fill()
def analyze(self, event):
"""process event, return True (go to next module) or False (fail, go to next event)"""
##### set variables ####
self.nElectrons = 0
self.nMuons = 0
self.nTaus = 0
self.nFatJets = 0
self.EventWeight = 1.
self.TopWeight = 1.
self.BTagAK8Weight = 1.
self.BTagAK4Weight = 1.
self.BTagAK8Weight_deep = 1.
self.BTagAK8Weight_deep_up = 1.
self.BTagAK8Weight_deep_down = 1.
self.BTagAK4Weight_deep = 1.
self.BTagAK4Weight_deep_up = 1.
self.BTagAK4Weight_deep_down = 1.
self.BBTagWeight = 1.
self.GenWeight = 1.
self.PUWeight = 1.
self.LeptonWeight = 1.
self.LeptonWeightUp = 1.
self.LeptonWeightDown = 1.
self.TriggerWeight = 1.
self.TriggerWeightUp = 1.
self.TriggerWeightDown = 1.
self.isZtoMM = False
self.isZtoEE = False
self.isZtoNN = False
self.isTtoEM = False
self.isBoosted4B = False
self.isHtobb = False
self.isHtobb_ml = False
self.isMaxBTag_loose = False
self.isMaxBTag_medium = False
self.isMaxBTag_tight = False
self.isVBF = False
self.is2016 = False
self.is2017 = False
self.is2018 = False
self.nTaus = 0
self.nJetsNoFatJet = 0
self.H_partonflavour = -1.
self.H_hadronflavour = -1.
self.DPhi = -1.
self.VHDEta = -1.
self.MinJetMetDPhi = 10.
self.MaxJetNoFatJetBTag = -1.
self.BtagDeepB = -1.
self.DeepTagMD_H4qvsQCD = -1.
self.DeepTagMD_HbbvsQCD = -1.
self.DeepTagMD_ZHbbvsQCD = -1.
self.DeepTagMD_ZbbvsQCD = -1.
self.DeepTagMD_bbvsLight = -1.
self.DeepTagMD_WvsQCD = -1.
self.DeepTagMD_ZvsQCD = -1.
self.Mu1_pt = -1.
self.Mu1_eta = -1.
self.Mu1_phi = -1.
self.Mu1_mass = -1.
self.Mu1_pfIsoId = -1.
self.Mu1_relIso = -1.
self.Mu1_highPtId = -1.
self.Mu2_pt = -1.
self.Mu2_eta = -1.
self.Mu2_phi = -1.
self.Mu2_mass = -1.
self.Mu2_pfIsoId = -1.
self.Mu2_relIso = -1.
self.Mu2_highPtId = -1.
self.Ele1_pt = -1.
self.Ele1_eta = -1.
self.Ele1_phi = -1.
self.Ele1_mass = -1.
self.Ele2_pt = -1.
self.Ele2_eta = -1.
self.Ele2_phi = -1.
self.Ele2_mass = -1.
self.Ele_HEM15_16 = -1.
self.HT_HEM15_16 = -1.
self.HT = 0.
self.LHEScaleWeight = -1.
self.LHEPdfWeight = -1.
self.LHEWeight_originalXWGTUP = -1.
self.PrefireWeight = 1.
self.PrefireWeightUp = 1.
self.PrefireWeightDown = 1.
self.QCDNLO_Corr = 1.
self.QCDNNLO_Corr = 1.
self.EWKNLO_Corr = 1.
self.Jet1_VBF_pt = -1.
self.Jet1_VBF_eta = -1.
self.Jet1_VBF_phi = -1.
self.Jet1_VBF_mass = -1.
self.Jet2_VBF_pt = -1.
self.Jet2_VBF_eta = -1.
self.Jet2_VBF_phi = -1.
self.Jet2_VBF_mass = -1.
self.dijet_VBF_mass = -1.
self.deltaR_VBF = -1.
self.deltaR_HVBFjet1 = -1.
self.deltaR_HVBFjet2 = -1.
self.H_pt = -1.
self.H_eta = -1.
self.H_phi = -1.
self.H_mass = -1.
self.H_M = -1.
self.H_tau21 = -1.
self.H_tau41 = -1.
self.H_tau42 = -1.
self.H_tau31 = -1.
self.H_tau32 = -1.
self.H_ddt = -1.
self.H_csv1 = -1.
self.H_csv2 = -1.
self.H_deepcsv1 = -1.
self.H_deepcsv2 = -1.
self.H_dbt = -1.
self.H_chf = -1.
self.H_nhf = -1.
self.V_pt = -1.
self.V_eta = -1.
self.V_phi = -1.
self.V_mass = -1.
self.VH_deltaR = -1.
self.X_pt = -1.
self.X_eta = -1.
self.X_phi = -1.
self.X_mass = -1.
self.X_mass_chs = -1.
self.X_mass_nom = -1.
self.X_mass_jesUp = -1.
self.X_mass_jesDown = -1.
self.X_mass_jerUp = -1.
self.X_mass_jerDown = -1.
self.X_mass_MET_nom = -1.
self.X_mass_MET_jesUp = -1.
self.X_mass_MET_jesDown = -1.
self.X_mass_MET_jerUp = -1.
self.X_mass_MET_jerDown = -1.
self.H_mass_nom = -1.
self.H_mass_jmsUp = -1.
self.H_mass_jmsDown = -1.
self.H_mass_jmrUp = -1.
self.H_mass_jmrDown = -1.
eecutflow_list = []
mmcutflow_list = []
nncutflow_list = []
idx_electrons = []
idx_loose_electrons = []
idx_muons = []
idx_loose_muons = []
idx_fatjet = []
idx_jet = []
idx_jet_vbf = []
electrons_tlv_list = []
loose_electrons_tlv_list = []
muons_tlv_list = []
loose_muons_tlv_list = []
fatjet_tlv_list = []
jet_tlv_list = []
jet_tlv_list_vbf = []
fatjet_tau21_list = []
fatjet_tau41_list = []
fatjet_tau42_list = []
fatjet_tau31_list = []
fatjet_tau32_list = []
V = ROOT.TLorentzVector()
H = ROOT.TLorentzVector()
X = ROOT.TLorentzVector()
V_chs = ROOT.TLorentzVector()
######### cuts #########
elec1_pt_cut = 55.
elec2_pt_cut = 20.
elec_pt_cut = 10.
elec_eta_cut = 2.5
muon1_pt_cut = 55.
muon2_pt_cut = 20.
muon_pt_cut = 10.
muon_eta_cut = 2.4
tau_pt_cut = 18.
tau_eta_cut = 2.3
ak4_pt_cut = 30.
ak4_eta_cut = 2.4
fatjet_pt_cut = 200.
fatjet_eta_cut = 2.4
met_pt_cut = 250.
v_pt_cut = 200.
tau21_lowercut = 0.35
tau21_uppercut = 0.75
j_mass_lowercut = 30.
j_mass_uppercut = 250.
v_mass_lowercut = 65.
v_mass_intercut = 85.
v_mass_uppercut = 105.
h_mass_lowercut = 105.
h_mass_uppercut = 135.
x_mass_lowercut = 750.
xt_mass_lowercut = 650.
xjj_mass_lowercut = 950.
#### flag for year #######
if self.year == 2016:
self.is2016 = True
elif self.year == 2017:
self.is2017 = True
elif self.year == 2018:
self.is2018 = True
######### triggers #########
if self.year == 2016:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu50])
except:
trigger_SingleMu = event.HLT_Mu50
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele27_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon175
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight])
trigger_MET = any([event.HLT_PFMET170_NotCleaned,
event.HLT_PFMET170_HBHECleaned])
elif self.year == 2017:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
except:
trigger_SingleMu = event.HLT_Mu50
try:
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
except:
trigger_SingleEle = None
trigger_SingleIsoEle = event.HLT_Ele35_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
try:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
except:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
try:
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
except:
trigger_MET = None
elif self.year == 2018:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele32_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
########## Gen Weight #########
if self.isMC:
self.GenWeight = -1. if event.genWeight < 0 else 1.
self.PUWeight = self.puTool.getWeight(event.Pileup_nTrueInt)
self.EventWeight *= self.GenWeight
self.EventWeight *= self.PUWeight
for i,weight in enumerate(event.LHEScaleWeight):
self.out.LHEScaleWeight_hist.Fill(i,weight)
for j,weight in enumerate(event.LHEPdfWeight):
self.out.LHEPdfWeight_hist.Fill(j,weight)
self.LHEScaleWeight = event.LHEScaleWeight
self.LHEPdfWeight = event.LHEPdfWeight
self.LHEWeight_originalXWGTUP = event.LHEWeight_originalXWGTUP
self.out.events.Fill(0.,self.GenWeight)
self.out.original.Fill(0.,event.LHEWeight_originalXWGTUP)
if self.year == 2016 or self.year == 2017:
self.PrefireWeight = event.PrefireWeight
self.PrefireWeightUp = event.PrefireWeight_Up
self.PrefireWeightDown = event.PrefireWeight_Down
if self.isData and event.PV_npvs == 0:
return False
if not self.isData:
self.out.pileup.Fill(event.Pileup_nTrueInt)
if event.Pileup_nTrueInt == 0:
return False
########### FatJet #########
for ifatjet in range(event.nFatJet):
fatjet_pt = event.FatJet_pt[ifatjet]
fatjet_eta = event.FatJet_eta[ifatjet]
fatjet_phi = event.FatJet_phi[ifatjet]
fatjet_mass = event.FatJet_mass[ifatjet]
fatjet_jetid = event.FatJet_jetId[ifatjet]
fatjet_tlv = ROOT.TLorentzVector()
fatjet_tlv.SetPtEtaPhiM(fatjet_pt, fatjet_eta, fatjet_phi, fatjet_mass)
if fatjet_pt > fatjet_pt_cut and abs(fatjet_eta) < fatjet_eta_cut:
fatjet_tlv_list.append(fatjet_tlv)
idx_fatjet.append(ifatjet)
if event.FatJet_tau1[ifatjet]==0:
fatjet_tau21_list.append(0)
fatjet_tau41_list.append(0)
fatjet_tau31_list.append(0)
else:
fatjet_tau21_list.append(event.FatJet_tau2[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau41_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau31_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau1[ifatjet])
if event.FatJet_tau2[ifatjet]==0:
fatjet_tau42_list.append(0)
fatjet_tau32_list.append(0)
else:
fatjet_tau42_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau2[ifatjet])
fatjet_tau32_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau2[ifatjet])
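# Note on the stored quantities: the n-subjettiness ratio tau2/tau1 tags
# two-prong (W/Z/H->bb) substructure, while tau4/tau1, tau3/tau1, tau4/tau2
# and tau3/tau2 target multi-prong decays; each ratio is forced to 0 when
# its denominator subjettiness vanishes, to avoid a division by zero.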
self.nFatJets = len(fatjet_tlv_list)
# stop processing the event if there is no suitable FatJet
if len(fatjet_tlv_list) == 0:
return False
########### electrons ##########
for ielectron in range(event.nElectron):
electron_pt = event.Electron_pt[ielectron]
electron_eta = event.Electron_eta[ielectron]
electron_phi = event.Electron_phi[ielectron]
electron_mass = event.Electron_mass[ielectron]
electron_tlv = ROOT.TLorentzVector()
electron_tlv.SetPtEtaPhiM(electron_pt,electron_eta,electron_phi,electron_mass)
if electron_eta > -2.5 and electron_eta < -1.479 and electron_phi > -1.55 and electron_phi < -0.9:
if self.Ele_HEM15_16 == -1.:
self.Ele_HEM15_16 = 0.
self.Ele_HEM15_16 += electron_pt
if electron_pt > elec_pt_cut and abs(electron_eta) < elec_eta_cut:
idx_electrons.append(ielectron)
electrons_tlv_list.append(electron_tlv)
if event.Electron_cutBased[ielectron] >= 2:
idx_loose_electrons.append(ielectron)
loose_electrons_tlv_list.append(electron_tlv)
self.nElectrons = len(loose_electrons_tlv_list)
########### muons #########
for imuon in range(event.nMuon):
muon_pt = event.Muon_pt[imuon]
muon_eta = event.Muon_eta[imuon]
muon_phi = event.Muon_phi[imuon]
muon_mass = event.Muon_mass[imuon]
muon_tlv = ROOT.TLorentzVector()
muon_tlv.SetPtEtaPhiM(muon_pt, muon_eta, muon_phi, muon_mass)
if muon_pt > muon_pt_cut and abs(muon_eta) < muon_eta_cut:
idx_muons.append(imuon)
muons_tlv_list.append(muon_tlv)
if event.Muon_isPFcand[imuon] and struct.unpack('B',event.Muon_pfIsoId[imuon])[0]>=2 and (event.Muon_isGlobal[imuon] or event.Muon_isTracker[imuon]):
idx_loose_muons.append(imuon)
loose_muons_tlv_list.append(muon_tlv)
self.nMuons = len(loose_muons_tlv_list)
############ taus #########
for itau in range(event.nTau):
tau_pt = event.Tau_pt[itau]
tau_eta = event.Tau_eta[itau]
tau_phi = event.Tau_phi[itau]
tau_mass = event.Tau_mass[itau]
tau_tlv = ROOT.TLorentzVector()
tau_tlv.SetPtEtaPhiM(tau_pt, tau_eta, tau_phi, tau_mass)
if tau_pt > tau_pt_cut and abs(tau_eta) < tau_eta_cut:
cleanTau = True
for loose_electrons_tlv in loose_electrons_tlv_list:
if loose_electrons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
for loose_muons_tlv in loose_muons_tlv_list:
if loose_muons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
if cleanTau:
self.nTaus += 1
############ MET ##########
METx = 0.
METy = 0.
MET_tlv = ROOT.TLorentzVector()
MET_tlv.SetPtEtaPhiE(event.PuppiMET_pt,0.,event.PuppiMET_phi, event.PuppiMET_pt)
############ TTbar pT reweighting ########
if self.isMC and 'TT' in self.sample[0]:
Top1_pt, Top2_pt = getTTPt(event)
self.TopWeight = getTTptWeight(Top1_pt, Top2_pt)
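# For reference: top-pT reweighting is conventionally applied as
# sqrt(SF(pt1)*SF(pt2)) with SF(pt) = exp(a - b*pt) (a ~ 0.0615,
# b ~ 0.0005 in the usual data/POWHEG fit); the exact parametrization used
# here lives in CorrectionTools.RecoilCorrectionTool.getTTptWeight.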
############ ZtoEE ############
self.out.eecutflow.Fill(0.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
maxZpt = -1.
Z_pt = -1.
Z_m = -1.
goodelectronpair = False
for i in idx_electrons:
for j in idx_electrons:
if i==j or event.Electron_charge[i] == event.Electron_charge[j]:
continue
eli_tlv = ROOT.TLorentzVector()
eli_tlv.SetPtEtaPhiM(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i],event.Electron_mass[i])
eli_v = ROOT.TVector3()
eli_v.SetPtEtaPhi(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i])
elj_tlv = ROOT.TLorentzVector()
elj_tlv.SetPtEtaPhiM(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j],event.Electron_mass[j])
elj_v = ROOT.TVector3()
elj_v.SetPtEtaPhi(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j])
diel = eli_tlv + elj_tlv
Z_pt = diel.Pt()
Z_m = diel.M()
if Z_m > 70. and Z_m < 110. and Z_pt > maxZpt:
maxZpt = Z_pt
if eli_tlv.Pt() > elj_tlv.Pt():
el1 = i
el2 = j
el1_tlv = eli_tlv
el2_tlv = elj_tlv
el1_v = eli_v
el2_v = elj_v
else:
el1 = j
el2 = i
el1_tlv = elj_tlv
el2_tlv = eli_tlv
el1_v = elj_v
el2_v = eli_v
goodelectronpair = True
if goodelectronpair:
self.out.eecutflow.Fill(1.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if el1_tlv.Pt() > elec1_pt_cut and el2_tlv.Pt() > elec2_pt_cut:
self.out.eecutflow.Fill(2.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if event.Electron_cutBased[el1] >= 2 and event.Electron_cutBased[el2] >= 2:
self.out.eecutflow.Fill(3.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if maxZpt > v_pt_cut:
self.out.eecutflow.Fill(4.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if trigger_SingleEle is None:
if not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
else:
if not trigger_SingleEle and not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
#if not self.isMC and ("SinglePhoton" in self.sample[0] and (trigger_SingleEle or trigger_SingleIsoEle)):
# print "ZtoEE double counting"
# return False
self.out.eecutflow.Fill(5.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if self.isMC:
eltrig_tlv = el1_tlv
#for i in range(event.nTrigObj):
# if event.TrigObj_id[i] ==11:
# trigobj_v = ROOT.TVector3()
# trigobj_v.SetPtEtaPhi(event.TrigObj_pt[i],event.TrigObj_eta[i],event.TrigObj_phi[i])
# print "electron TrigObj_filterBits:",event.TrigObj_filterBits[i]
# if event.TrigObj_filterBits[i]==14336:
# #if event.TrigObj_filterBits[i]==1110000000000000:
# print "found matching electron"
# deltaR1 = trigobj_v.DeltaR(el1_v)
# deltaR2 = trigobj_v.DeltaR(el2_v)
# if deltaR2 < deltaR1 and deltaR2 < 0.2:
# eltrig_tlv = el2_tlv
# break
self.TriggerWeight = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightUp = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) + self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightDown = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) - self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.LeptonWeight = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())*self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1 = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())
IdIsoSF2 = self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1error = self.elSFs.getIdIsoSFerror(el1_tlv.Pt(), el1_tlv.E
#
# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex", "mscAtmIfVptIndex", "mscAtmIfVcc", "mscAtmIfVptVccIndex", "mscAtmIfVpc", "mscAtmIfVptVcc", "mscAtmIfVccIndex", "mscAtmIfVpcIndex")
mscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIisp", "mscAtmIfVptIisp", "mscAtmIfVptIispIndex", "mscAtmIfIispIndex")
mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrc", "mscAtmIfVptVccSrcIndex", "mscAtmIfVccSrcIndex", "mscAtmIfVptVccSrc", "mscAtmIfVpcSrcIndex", "mscAtmIfVccSrc")
mscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex", "mscAtmIfPnniIndex", "mscAtmIfPnni", "mscAtmIfVptPnni")
mscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUni", "mscAtmIfUni", "mscAtmIfUniIndex", "mscAtmIfVptUniIndex")
Counter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "Counter32", "DisplayString", "Gauge32", "StorageType", "RowStatus")
NonReplicated, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "NonReplicated")
mscPassportMIBs, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscPassportMIBs")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ObjectIdentity", "ModuleIdentity", "Bits", "Counter32", "IpAddress", "Gauge32", "NotificationType", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "Counter64", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
atmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))
mscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))
mscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))
mscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
mscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))
mscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))
mscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')
mscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfUniEbrIndex.setStatus('mandatory')
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
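# --- Hand-added illustration (not part of the generated MIB module) ---------
# A minimal sketch of reading one of the EBR statistics counters defined
# above with pysnmp's high-level API. The host address and community string
# are placeholders, and a real query would need the agent's index values
# appended to the column OID (1.3.6.1.4.1.562.36.2.1.114.6.7.40.1.1,
# mscAtmIfUniEbrTotalConnectionRecoveries); both are assumptions here.
if __name__ == '__main__':
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
        SnmpEngine(), CommunityData('public'),
        UdpTransportTarget(('192.0.2.1', 161)), ContextData(),
        ObjectType(ObjectIdentity('1.3.6.1.4.1.562.36.2.1.114.6.7.40.1.1'))))
    if errorIndication or errorStatus:
        print(errorIndication or errorStatus.prettyPrint())
    else:
        for varBind in varBinds:
            print(varBind.prettyPrint())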
<|begin_of_text|>"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import sensor_msgs.msg
class GetPlanningSceneRequest(genpy.Message):
_md5sum = "67ad55e9bed9c8f21dfb4b9b1ca8df7d"
_type = "arm_navigation_msgs/GetPlanningSceneRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
PlanningScene planning_scene_diff
arm_navigation_msgs/OrderedCollisionOperations operations
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of a multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation. 
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See its documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by the three vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with an CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE will remove all attached bodies attached to any object
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space (with 32 bits of precision).
# It is recommended to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
================================================================================
MSG: arm_navigation_msgs/OrderedCollisionOperations
# A set of collision operations that will be performed in the order they are specified
CollisionOperation[] collision_operations
================================================================================
MSG: arm_navigation_msgs/CollisionOperation
# A definition of a collision operation
# E.g. ("gripper",COLLISION_SET_ALL,ENABLE) will enable collisions
# between the gripper and all objects in the collision space
string object1
string object2
string COLLISION_SET_ALL="all"
string COLLISION_SET_OBJECTS="objects"
string COLLISION_SET_ATTACHED_OBJECTS="attached"
# The penetration distance to which collisions are allowed. This is 0.0 by default.
float64 penetration_distance
# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above
int32 operation
int32 DISABLE=0
int32 ENABLE=1
"""
__slots__ = ['planning_scene_diff','operations']
_slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene_diff,operations
    :param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
else:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
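  # Usage sketch (hand-added; genpy does not emit this comment): construct
  # with keyword arguments and mutate fields in place, e.g.
  #   req = GetPlanningSceneRequest()                  # all fields defaulted
  #   req.planning_scene_diff.robot_state.joint_state.name = ['joint1']
  #   req.operations.collision_operations = []
  # 'joint1' is a hypothetical value; any slot listed in __slots__ works.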
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v1 = val1.position
_x = _v1
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v2 = val1.orientation
_x = _v2
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v3 = val1.header
buff.write(_struct_I.pack(_v3.seq))
_v4 = _v3.stamp
_x = _v4
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v3.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v5 = val1.transform
_v6 = _v5.translation
_x = _v6
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v7 = _v5.rotation
_x = _v7
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.pack(pattern, *val1.enabled))
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v8 = val1.shape
buff.write(_struct_b.pack(_v8.type))
length = len(_v8.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *_v8.dimensions))
length = len(_v8.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *_v8.triangles))
length = len(_v8.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v8.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v9 = val1.pose_stamped
_v10 = _v9.header
buff.write(_struct_I.pack(_v10.seq))
_v11 = _v10.stamp
_x = _v11
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v10.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v12 = _v9.pose
_v13 = _v12.position
_x = _v13
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v14 = _v12.orientation
_x = _v14
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v15 = val1.header
buff.write(_struct_I.pack(_v15.seq))
_v16 = _v15.stamp
_x = _v16
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v15.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v17 = val1.operation
buff.write(_struct_b.pack(_v17.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val2.dimensions))
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val2.triangles))
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v18 = val2.position
_x = _v18
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v19 = val2.orientation
_x = _v19
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v20 = val1.object
_v21 = _v20.header
buff.write(_struct_I.pack(_v21.seq))
_v22 = _v21.stamp
_x = _v22
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v21.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v20.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v20.padding))
_v23 = _v20.operation
buff.write(_struct_b.pack(_v23.operation))
length = len(_v20.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v20.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val3.dimensions))
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val3.triangles))
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v20.poses)
buff.write(_struct_I.pack(length))
for val3 in _v20.poses:
_v24 = val3.position
_x = _v24
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v25 = val3.orientation
_x = _v25
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v26 = val1.center
_x = _v26
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v27 = val1.extents
_x = _v27
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v28 = val1.axis
_x = _v28
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
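  # --- Hand-added illustration (genpy does not emit this helper) ------------
  # Every variable-length field above is written as a little-endian uint32
  # byte count followed by the raw bytes ('<I%ss'). This hypothetical helper
  # shows that one convention in isolation; serialize()/deserialize() simply
  # repeat it inline for every string and array field.
  @staticmethod
  def _demo_pack_string(buff, text):
    data = text if isinstance(text, bytes) else text.encode('utf-8')
    buff.write(struct.pack('<I%ss' % len(data), len(data), data))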
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v29 = val1.position
_x = _v29
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v30 = val1.orientation
_x = _v30
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v31 = val1.header
start = end
end += 4
(_v31.seq,) = _struct_I.unpack(str[start:end])
_v32 = _v31.stamp
_x = _v32
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v31.frame_id = str[start:end].decode('utf-8')
else:
_v31.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v33 = val1.transform
_v34 = _v33.translation
_x = _v34
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v35 = _v33.rotation
_x = _v35
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
        val1.enabled = list(map(bool, val1.enabled))  # wrap in list(); under Python 3 a bare map() iterator would break later re-serialization
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v36 = val1.shape
start = end
end += 1
(_v36.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v36.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v36.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v36.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v36.vertices.append(val3)
_v37 = val1.pose_stamped
_v38 = _v37.header
start = end
end += 4
(_v38.seq,) = _struct_I.unpack(str[start:end])
_v39 = _v38.stamp
_x = _v39
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v38.frame_id = str[start:end].decode('utf-8')
else:
_v38.frame_id = str[start:end]
_v40 = _v37.pose
_v41 = _v40.position
_x = _v41
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v42 = _v40.orientation
_x = _v42
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v43 = val1.header
start = end
end += 4
(_v43.seq,) = _struct_I.unpack(str[start:end])
_v44 = _v43.stamp
_x = _v44
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v43.frame_id = str[start:end].decode('utf-8')
else:
_v43.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v45 = val1.operation
start = end
end += 1
(_v45.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB
# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,
# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)
# Imports
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
( DscpOrAny, ) = mibBuilder.importSymbols("DIFFSERV-DSCP-TC", "DscpOrAny")
( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType", "InetPortNumber")
( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup")
( Bits, Counter32, Counter64, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter32", "Counter64", "Integer32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "mib-2")
( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowStatus", "StorageType", "TextualConvention", "TimeStamp", "TruthValue")
# Types
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = "d"
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)
namedValues = NamedValues(("downstream", 1), ("upstream", 2), )
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)
namedValues = NamedValues(("undefined", 1), ("bestEffort", 2), ("nonRealTimePollingService", 3), ("realTimePollingService", 4), ("unsolictedGrantServiceWithAD", 5), ("unsolictedGrantService", 6), )
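# --- Hand-added illustration (not part of the smidump output) ---------------
# The enumerated textual conventions above map wire integers to names, so a
# DocsIetfQosSchedulingType of 2 renders as 'bestEffort'. Since this module
# normally runs only inside a MibBuilder (its own imports are injected at
# load time), the sketch below recreates the mapping directly on pyasn1 types.
if __name__ == '__main__':
    from pyasn1.type.univ import Integer as _DemoInteger
    from pyasn1.type.namedval import NamedValues as _DemoNamedValues
    class _SchedulingDemo(_DemoInteger):
        namedValues = _DemoNamedValues(('undefined', 1), ('bestEffort', 2))
    print(_SchedulingDemo(2).prettyPrint())  # -> 'bestEffort'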
# Objects
docsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions(("2006-01-23 00:00",))
if mibBuilder.loadTexts: docsIetfQosMIB.setOrganization("IETF IP over Cable Data Network (IPCDN)\nWorking Group")
if mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo("\nCo-Author: Michael Patrick\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7563\nE-mail: [email protected]\n\nCo-Author: William Murwin\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7594\nE-mail: [email protected]\n\nIETF IPCDN Working Group\nGeneral Discussion: [email protected]\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\nCo-chairs: Richard Woundy, [email protected]\n Jean-Francois Mule, [email protected]")
if mibBuilder.loadTexts: docsIetfQosMIB.setDescription("This is the management information for\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\n\n\n\nCopyright (C) The Internet Society (2006). This version of\nthis MIB module is part of RFC 4323; see the RFC itself for\nfull legal notices.")
docsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))
docsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))
docsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))
if mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription("This table describes the packet classification\nconfigured on the CM or CMTS.\nThe model is that a packet either received\nas input from an interface or transmitted\nfor output on an interface may be compared\nagainst an ordered list of rules pertaining to\nthe packet contents. Each rule is a row of this\ntable. A matching rule provides a Service Flow\nID to which the packet is classified.\nAll rules need to match for a packet to match\na classifier.\n\nThe objects in this row correspond to a set of\nClassifier Encoding parameters in a DOCSIS\nMAC management message. The\ndocsIetfQosPktClassBitMap indicates which\nparticular parameters were present in the\nclassifier as signaled in the DOCSIS message.\nIf the referenced parameter was not present\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\ncorresponding object in this row reports a\nvalue as specified in the DESCRIPTION section.")
docsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription("An entry in this table provides a single packet\nclassifier rule. The index ifIndex is an ifType\nof docsCableMaclayer(127).")
docsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription("Index assigned to packet classifier entry by\nthe CMTS, which is unique per Service Flow.")
docsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription("Indicates the direction to which the classifier\nis applied.")
docsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription("The value specifies the order of evaluation\nof the classifiers.\n\nThe higher the value, the higher the priority.\nThe value of 0 is used as default in\nprovisioned Service Flows Classifiers.\nThe default value of 64 is used for dynamic\nService Flow Classifiers.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the default\nvalue as defined above.")
docsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription("The low value of a range of TOS byte values.\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet, as originally defined in RFC 791,\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as per the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription("The 8-bit high value of a range of TOS byte\nvalues.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the\nvalue of 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as defined by the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription("The mask value is bitwise ANDed with TOS byte\nin an IP packet, and this value is used for\nrange checking of TosLow and TosHigh.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet per the DOCSIS Specification for packet\nclassification.")
docsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription("This object indicates the value of the IP\nProtocol field required for IP packets to match\nthis rule.\n\n\n\n\nThe value 256 matches traffic with any IP Protocol\nvalue. The value 257 by convention matches both TCP\nand UDP.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 258.")
docsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription("The type of the Internet address for\ndocsIetfQosPktClassInetSourceAddr,\ndocsIetfQosPktClassInetSourceMask,\ndocsIetfQosPktClassInetDestAddr, and\ndocsIetfQosPktClassInetDestMask.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\nipv4(1).")
docsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription("This object specifies the value of the IP\nSource Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Source Address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription("This object specifies which bits of a packet's\nIP Source Address are compared to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nsource address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosIpPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription("This object specifies the value of the IP\nDestination Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Destination Address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value\nequals the docsIetfQosPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription("This object specifies which bits of a packet's\nIP Destination Address are compared to\nmatch this rule.\n\nAn IP packet matches the rule when the packet\ndestination address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value equals the\ndocsIetfQosIpPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP destination port numbers to\nwhich a packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP destination port numbers to which\na packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourceMacAddr.setDescription("An Ethernet packet matches this entry when its\nsource MAC address equals the value of\nthis object.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFFFFFF'H.")
docsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues(("none", 0), ("ethertype", 1), ("dsap", 2), ("mac", 3), ("all", 4), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription("This object indicates the format of the layer 3\nprotocol ID in the Ethernet packet. A value of\nnone(0) means that the rule does not use the\nlayer 3 protocol type as a matching criteria.\n\nA value of ethertype(1) means that the rule\napplies only to frames that contain an\nEtherType value. Ethertype values are contained\nin packets using the Dec-Intel-Xerox (DIX)\nencapsulation or the RFC1042 Sub-Network Access\nProtocol (SNAP) encapsulation formats.\n\nA value of dsap(2) means that the rule applies\n\n\n\nonly to frames using the IEEE802.3\nencapsulation format with a Destination Service\nAccess Point (DSAP) other\nthan 0xAA (which is reserved for SNAP).\n\nA value of mac(3) means that the rule applies\nonly to MAC management messages for MAC management\nmessages.\n\nA value of all(4) means that the rule matches\nall Ethernet packets.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object\napplies to the embedded EtherType field within\nthe 802.1P/Q header.\n\nIf the referenced parameter is not present in a\nclassifier, this object reports the value of 0.")
docsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription("If docsIetfQosEthPktClassProtocolType is none(0),\nthis object is ignored when considering whether\na packet matches the current rule.\n\nIf dosQosPktClassEnetProtocolType is ethertype(1),\nthis object gives the 16-bit value of the\nEtherType that the packet must match in order to\nmatch the rule.\n\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\nthe lower 8 bits of this object's value must match\nthe DSAP byte of the packet in order to match the\nrule.\n\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\nthe lower 8 bits of this object's value represent a\nlower bound (inclusive) of MAC management message\ntype codes matched, and the upper 8 bits represent\nthe upper bound (inclusive) of matched MAC message\ntype codes. Certain message type codes are\nexcluded from matching, as specified in the\nreference.\n\n\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object applies\nto the embedded EtherType field within the 802.1P/Q\nheader.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Qtag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\n\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 7.")
docsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header.\n\nTagged packets must have a VLAN Identifier that\nmatches the value in order to match the rule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription("This object indicates whether or not the classifier\nis enabled to classify packets to a Service Flow.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas true(1).")
docsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription("This object counts the number of packets that have\nbeen classified using this entry. This\nincludes all packets delivered to a Service Flow\nmaximum rate policing function, whether or not that\nfunction drops the packets.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues(("rulePriority", 0), ("activationState", 1), ("destPortStart", 10), ("destPortEnd", 11), ("destMac", 12), ("sourceMac", 13), ("ethertype", 14), ("userPri", 15), ("vlanId", 16), ("ipTos", 2), ("ipProtocol", 3), ("ipSourceAddr", 4), ("ipSourceMask", 5), ("ipDestAddr", 6), ("ipDestMask", 7), ("sourcePortStart", 8), ("sourcePortEnd", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription("This object indicates which parameter encodings\nwere actually present in the DOCSIS packet\nclassifier encoding signaled in the DOCSIS message\nthat created or modified the classifier. Note that\nDynamic Service Change messages have replace\nsemantics, so that all non-default parameters must\nbe present whether the classifier is being created\nor changed.\n\nA bit of this object is set to 1 if the parameter\nindicated by the comment was present in the\nclassifier encoding, and to 0 otherwise.\n\nNote that BITS are encoded most significant bit\nfirst, so that if, for example, bits 6 and 7 are\nset, this object is encoded as the octet string\n'030000'H.")
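# The DESCRIPTION above gives '030000'H as the encoding when bits 6 and 7
# are set. A minimal standalone sketch (illustrative only, not part of the
# generated MIB) of that most-significant-bit-first BITS encoding:
def _bits_to_octets(bit_positions, num_octets=3):
    # Bit 0 is the MSB of octet 0, bit 7 its LSB, bit 8 the MSB of octet 1, ...
    octets = bytearray(num_octets)
    for bit in bit_positions:
        octets[bit // 8] |= 0x80 >> (bit % 8)
    return octets.hex()

assert _bits_to_octets({6, 7}) == '030000'  # matches the '030000'H example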
docsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))
if mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription("This table describes the set of DOCSIS 1.1 and 2.0\nQOS parameters defined in a managed device.\n\nThe ifIndex index specifies a DOCSIS MAC Domain.\nThe docsIetfQosServiceFlowId index specifies a\nparticular Service Flow.\nThe docsIetfQosParamSetType index indicates whether\nthe active, admitted, or provisioned QOS Parameter\nSet is being described by the row.\n\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\nService Flows are represented in this table.\n\nDOCSIS 1.0 QOS service profiles are not\nrepresented in this table.\n\nEach row corresponds to a DOCSIS QOS Parameter Set\nas signaled via DOCSIS MAC management messages.\nEach object in the row corresponds to one or\npart of one DOCSIS 1.1 Service Flow Encoding.\nThe docsIetfQosParamSetBitMap object in the row\nindicates which particular parameters were signaled\nin the original registration or dynamic service\nrequest message that created the QOS Parameter Set.\n\nIn many cases, even if a QOS Parameter Set parameter\nwas not signaled, the DOCSIS specification calls\nfor a default value to be used. That default value\nis reported as the value of the corresponding object\nin this row.\n\nMany objects are not applicable, depending on\nthe Service Flow direction or upstream scheduling\ntype. The object value reported in this case\nis specified in the DESCRIPTION clause.")
docsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosParamSetType"))
if mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription("A unique set of QOS parameters.")
docsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription("Refers to the Service Class Name from which the\nparameter set values were derived.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is a zero-length string.")
docsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription("The relative priority of a Service Flow.\nHigher numbers indicate higher priority.\nThis priority should only be used to differentiate\n\n\n\nService Flow from identical parameter sets.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, the reported value is 0.")
docsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription("Maximum sustained traffic rate allowed for this\nService Flow in bits/sec. Must count all MAC frame\ndata PDU from the bytes following the MAC header\nHCS to the end of the CRC. The number of bytes\nforwarded is limited during any time interval.\nThe value 0 means no maximum traffic rate is\nenforced. This object applies to both upstream and\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, it is reported as 0.")
docsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription("Specifies the token bucket size in bytes\nfor this parameter set. The value is calculated\nfrom the byte following the MAC header HCS to\nthe end of the CRC. This object is applied in\nconjunction with docsIetfQosParamSetMaxTrafficRate\nto calculate maximum sustained traffic rate.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort (2), nonRealTimePollingService(3),\nand realTimePollingService(4) is 3044.\n\nIf this parameter is not applicable, it is reported\nas 0.")
docsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription("Specifies the guaranteed minimum rate in\nbits/sec for this parameter set. The value is\ncalculated from the byte following the MAC\nheader HCS to the end of the CRC. The default\nvalue of 0 means that no bandwidth is reserved.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter\nis not applicable, it is reported as 0.")
docsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription("Specifies an assumed minimum packet size in\nbytes for which the\ndocsIetfQosParamSetMinReservedRate will be\nprovided. The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.")
docsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before CMTS signals that both active and\nadmitted parameters set are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.")
docsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.")
docsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription("Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte in\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\n\n\n\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.")
docsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription("Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).")
docsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolictedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolictedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\n\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolictedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.")
# -*- python -*-
# ex: set syntax=python:
# vim: set syntax=python:
import os
import re
from collections import defaultdict, namedtuple
from enum import Enum
from pathlib import Path
import buildbot.www.authz.endpointmatchers as ems
from buildbot.changes.filter import ChangeFilter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.locks import WorkerLock
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Interpolate, Property, renderer, Transform
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.github import GitHubStatusPush
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.schedulers.basic import AnyBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.steps.cmake import CMake
from buildbot.steps.master import SetProperties
from buildbot.steps.shell import SetPropertyFromCommand, ShellCommand
from buildbot.steps.source.git import Git
from buildbot.steps.source.github import GitHub
from buildbot.steps.transfer import FileUpload, FileDownload
from buildbot.steps.worker import MakeDirectory, SetPropertiesFromEnv, RemoveDirectory
from buildbot.worker import Worker
from buildbot.www.auth import UserPasswordAuth
from buildbot.www.authz import Authz
from buildbot.www.authz.roles import RolesFromUsername
from buildbot.www.hooks.github import GitHubEventHandler
from twisted.internet import defer
from custom_steps import CTest, CleanOldFiles, SetPropertiesFromCMakeCache
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# SECRETS
GITHUB_TOKEN = Path('github_token.txt').read_text().strip()
WORKER_SECRET = Path('halide_bb_pass.txt').read_text().strip()
WEBHOOK_SECRET = Path('webhook_token.txt').read_text().strip()
WWW_PASSWORD = Path('buildbot_www_pass.txt').read_text().strip()
# SERVER SETTINGS
ARTIFACTS_DIR = os.environ.get('HALIDE_BB_MASTER_ARTIFACTS_DIR', '/home/halidenightly/artifacts')
REPO_DIR = Path(__file__, '..', '..').resolve()
# LLVM
# At any given time, we test (at least) 3 LLVM versions:
# - the current main (changes daily)
# - the most recent release (expected to be stable)
# - an older release (expected to be stable)
#
# The branches that correspond to these will rotate as new versions
# are released, but the underlying test logic should not need changing.
Version = namedtuple('Version', ['major', 'minor', 'patch'])
VersionedBranch = namedtuple('VersionedBranch', ['ref', 'version'])
LLVM_MAIN = 'main'
LLVM_RELEASE_17 = 'release_17'
LLVM_RELEASE_16 = 'release_16'
LLVM_RELEASE_15 = 'release_15'
LLVM_BRANCHES = {LLVM_MAIN: VersionedBranch(ref='main', version=Version(18, 0, 0)),
LLVM_RELEASE_17: VersionedBranch(ref='release/17.x', version=Version(17, 0, 0)),
LLVM_RELEASE_16: VersionedBranch(ref='llvmorg-16.0.6', version=Version(16, 0, 6)),
LLVM_RELEASE_15: VersionedBranch(ref='llvmorg-15.0.7', version=Version(15, 0, 7))}
# At any given time, Halide has a main branch, which supports (at least)
# the LLVM main branch and the most recent release branch (and maybe one older).
#
# We also support previous release branches; a release branch tracks *only* the
# corresponding version of LLVM (i.e., Halide 13 is 'release/13.x' and is only
# built/tested against LLVM13, even though it might still work with other LLVM versions).
#
# Note that we deliberately chose branch names that match LLVM's conventions.
#
# (Note that there are older releases of Halide that we no longer bother to build/test regularly.)
HALIDE_MAIN = 'main'
HALIDE_RELEASE_16 = 'release_16'
HALIDE_RELEASE_15 = 'release_15'
_HALIDE_RELEASES = [
HALIDE_RELEASE_16,
HALIDE_RELEASE_15,
]
HALIDE_BRANCHES = {HALIDE_MAIN: VersionedBranch(ref='main', version=Version(17, 0, 0)),
HALIDE_RELEASE_16: VersionedBranch(ref='release/16.x', version=Version(16, 0, 6)),
HALIDE_RELEASE_15: VersionedBranch(ref='release/15.x', version=Version(15, 0, 1))}
# This lists the Halide branch(es) for which we want to build nightlies;
# it's usually desirable to constrain these to save buildbot time (esp on the slower bots)
# and avoid branches that aren't changing much (i.e. -- recent releases that aren't
# likely to need new updates soon).
HALIDE_NIGHTLIES = [HALIDE_MAIN]
# Given a halide branch, return the 'native' llvm version we expect to use with it.
# For halide release branches, this is the corresponding llvm release branch; for
# halide main, it's llvm main.
LLVM_FOR_HALIDE = {
HALIDE_MAIN: [LLVM_MAIN, LLVM_RELEASE_17, LLVM_RELEASE_16],
HALIDE_RELEASE_16: [LLVM_RELEASE_16],
HALIDE_RELEASE_15: [LLVM_RELEASE_15],
}
# WORKERS
# Can use Python 3.7 dataclasses instead, if we choose to upgrade to that.
WorkerConfig = namedtuple('WorkerConfig', ['max_builds', 'j', 'arch', 'bits', 'os'])
# Using nproc+2 on the arm32 builds causes internal errors in gcc-armeabihf. Let's just use nproc.
_NPROC = Interpolate("%(worker:numcpus)s")
# For machines with max_builds=1, using nproc+2 cores for building is the conventional choice
# (and what ninja defaults to). Oddly, "ninja -j 0" means "use as many threads as you like" which
# is definitely not what we want.
_NPROC_PLUS_2 = Transform(lambda x: f'{int(x) + 2}', _NPROC)
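# For example, on an 8-core worker %(worker:numcpus)s renders to "8", so
# _NPROC_PLUS_2 renders to "10" (the Transform is applied to the already
# rendered string at build time). The equivalent plain-Python arithmetic:
assert (lambda x: f'{int(x) + 2}')('8') == '10'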
_WORKERS = [
('linux-worker-1', WorkerConfig(max_builds=4, j=8, arch='x86', bits=[32, 64], os='linux')),
('linux-worker-4', WorkerConfig(max_builds=4, j=8, arch='x86', bits=[32, 64], os='linux')),
# 2013 Mac Pro running a 6-core Xeon.
('mac-x86-worker-1', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
# Mac Mini 2018, 3.2 GHz 6-Core Intel Core i7, 16GB memory
('mac-x86-worker-2', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
# Mac Mini 2018, ??? details TBD
('mac-x86-worker-3', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
('mac-arm-worker-1', WorkerConfig(max_builds=2, j=8, arch='arm', bits=[64], os='osx')),
# The arm-linux bots here have 4 cores but apparently don't have enough RAM to do more
# than -j=2 without crashing during LLVM builds.
('arm32-linux-worker-1', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[32], os='linux')),
('arm32-linux-worker-2', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[32], os='linux')),
('arm64-linux-worker-1', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[64], os='linux')),
('arm64-linux-worker-2', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[64], os='linux')),
# The rpi4 has 8GB ram, so apparently it's OK with -j=nproc for now.
('rpi4-linux-worker-1', WorkerConfig(max_builds=1, j=_NPROC, arch='arm', bits=[32], os='linux')),
# TODO: should normally be offline because every D3D12 test fails
('win-worker-2', WorkerConfig(max_builds=1, j=_NPROC_PLUS_2, arch='x86', bits=[32, 64], os='windows')),
# TODO: broken, pending repair till Monday
# ('win-worker-3', WorkerConfig(max_builds=2, j=_NPROC_PLUS_2, arch='x86', bits=[32, 64], os='windows')),
]
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
c['workers'] = [Worker(n,
WORKER_SECRET,
keepalive_interval=300, # default is 3600 (1 hour). We'll do 5 mins.
max_builds=cfg.max_builds,
properties={'WORKER_BUILD_PARALLELISM': cfg.j}) for n, cfg in _WORKERS]
_SANITIZERS = [
'asan',
'fuzzer', # this isn't *technically* a sanitizer, but is close enough that it's a good fit
]
# LOCKS
# Performance testing requires exclusive use of a worker
# Compute-intensive build steps will grab this lock in reader
# mode. The performance test will grab it in exclusive mode.
performance_lock = WorkerLock("performance_lock", maxCount=9999)
# When building the LLVM nightlies, we can sync & build LLVM independently
# from other work, but when we update the install directory, we need to ensure
# we have an exclusive lock across the entire worker. (Since we have a small
# number of LLVM versions, and since 'make install' doesn't take very long,
# we could probably just get by with a single lock for *any* llvm install,
# but this isn't much harder to do.)
llvm_build_locks = {}
for llvm_branch, info in LLVM_BRANCHES.items():
for bits in [32, 64]:
llvm_build_locks[llvm_branch + str(bits)] = WorkerLock(
f'llvm_install_lock_{info.version.major}_{bits}', maxCount=9999)
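# Illustrative only (not registered with any factory): a step that rewrites
# the LLVM install directory would take the matching lock exclusively, e.g.
#   locks=[llvm_build_locks[LLVM_MAIN + '64'].access('exclusive')]
# while compute-only steps take the same lock in 'counting' mode.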
# CHANGESOURCES
# Here we point the buildbot at third-party codebases, ie. dependencies.
# Currently, we only have LLVM's `main` branch configured.
c['change_source'] = [
GitPoller(
repourl='https://github.com/llvm/llvm-project.git',
workdir='gitpoller-llvm-workdir',
branch=LLVM_BRANCHES[LLVM_MAIN].ref,
pollInterval=60 * 60 * 24, # Only check llvm once every 24 hours
pollAtLaunch=True)
]
# CODEBASES
all_repositories = {
'https://github.com/halide/Halide.git': 'halide',
'https://github.com/llvm/llvm-project.git': 'llvm',
}
def codebase_generator(chdict):
repo = chdict['repository']
assert repo in all_repositories, "Codebase not found for chdict: " + str(chdict)
return all_repositories[repo]
c['codebaseGenerator'] = codebase_generator
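# For example, a change polled from LLVM carries its repourl in the chdict
# and maps to the 'llvm' codebase:
assert codebase_generator(
    {'repository': 'https://github.com/llvm/llvm-project.git'}) == 'llvm'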
# BUILDERS
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
class Purpose(Enum):
halide_nightly = 1
halide_testbranch = 2
llvm_nightly = 3
class BuildSystem(Enum):
make = 0
cmake = 1
class BuilderType:
"""A class to encapsulate the settings for a specific Builder.
(Do not confuse with CMake's 'BUILD_TYPE', which is something else.)
It includes:
- Halide 'target' in the form of arch-bits-os
- LLVM branch to be used
- CMake vs Make
- halide-nightly vs halide-testbranch vs llvm-nightly
- sanitizers vs none
It doesn't currently include any 'features' because we don't currently
bake any in at build time.
It doesn't currently include the C++ compiler used (eg gcc7 vs gcc8 vs clang),
mainly because we currently never test with multiple compilers for a given
setup. (If we ever need to do so, compiler should be added to this.)
"""
def __init__(self, arch, bits, os, halide_branch, llvm_branch, purpose, sanitizer=None,
buildsystem=BuildSystem.cmake):
assert arch in ['arm', 'x86']
assert bits in [32, 64]
assert os in ['linux', 'windows', 'osx']
assert llvm_branch in LLVM_BRANCHES, f'{llvm_branch} not recognized'
self.arch = arch
self.bits = bits
self.os = os
self.halide_branch = halide_branch
self.llvm_branch = llvm_branch
self.buildsystem = buildsystem
self.purpose = purpose
self.sanitizer = sanitizer
if self.halide_branch:
assert self.purpose != Purpose.llvm_nightly
assert self.halide_branch in HALIDE_BRANCHES, f'unknown branch {self.halide_branch}'
assert (self.purpose == Purpose.halide_testbranch or # if not testbranch...
self.llvm_branch in LLVM_FOR_HALIDE[self.halide_branch])
else:
assert self.purpose == Purpose.llvm_nightly
if self.sanitizer:
assert self.sanitizer in _SANITIZERS
# The armbots aren't configured with Python at all.
# We don't support the Python bindings on 32-bit at all.
def handles_python(self):
if self.bits == 32:
return False
if self.arch == 'arm' and self.os == 'linux':
return False
if self.sanitizer_preset() is not None:
return False
return True
def handles_sanitizers(self):
if self.buildsystem != BuildSystem.cmake:
return False
return (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux'
and self.llvm_branch == LLVM_MAIN)
def sanitizer_preset(self):
if self.handles_sanitizers():
if self.sanitizer == 'asan':
return 'linux-x64-asan'
if self.sanitizer == 'fuzzer':
return 'linux-x64-fuzzer'
return None
def handles_riscv(self):
# Only support RISCV on LLVM16 or later.
return self.llvm_branch not in [LLVM_RELEASE_15]
def handles_hexagon(self):
return (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux'
and self.llvm_branch == LLVM_MAIN)
def handles_wasm(self):
is_linux_x64 = (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux')
return (self.llvm_branch == LLVM_MAIN
and (is_linux_x64 or self.os == 'osx'))
def handles_wasm_wabt(self):
return self.handles_wasm()
def handles_wasm_v8(self):
# OSX machines don't have V8 installed
return self.handles_wasm() and self.os == 'linux'
def has_nvidia(self):
return (self.arch == 'x86'
and self.bits == 64
and self.os in ['windows', 'linux'])
def handles_vulkan(self):
# TODO: disabled temporarily pending fixes to the Vulkan runtime
return False
# Stick with Linux on x86-64 for now. Others TBD.
# return (self.arch == 'x86'
# and self.bits == 64
# and self.os == 'linux'
# and self.halide_branch in [HALIDE_MAIN, HALIDE_RELEASE_16])
def handles_webgpu(self):
# At the moment, the WebGPU team recommends the OSX versions of Dawn/Node
# as the most robust for testing, so that's all we're set up to test with.
# (Note that 'Dawn' must be built/installed on the test machines manually;
# there are no binaries/prebuilts available at this time.)
return self.os == 'osx' and self.halide_branch not in [HALIDE_RELEASE_15]
def has_tflite(self):
if self.arch == 'x86' and self.bits == 64 and self.os == 'linux':
return True
if self.arch == 'arm' and self.bits == 64 and self.os == 'osx':
return True
return False
def has_ccache(self):
return self.os in ['osx', 'linux']
def halide_target(self):
return '%s-%d-%s' % (self.arch, self.bits, self.os)
def llvm_builder_label(self):
return 'llvm-%s-%s' % (LLVM_BRANCHES[self.llvm_branch].version.major, self.halide_target())
def halide_builder_label(self):
# This currently tries to (somewhat) mimic the existing label pattern,
# but is arbitrary. (If changed, manual purging of buildbot temporaries
# is appropriate)
a = ['halide']
if self.sanitizer:
a.append(self.sanitizer)
if self.purpose == Purpose.halide_testbranch:
a.append('testbranch')
elif self.purpose == Purpose.halide_nightly:
a.append('nightly')
a.append(self.halide_branch)
if self.halide_branch == HALIDE_MAIN:
# Halide master is built against multiple LLVM versions,
# so append that here for clarity
a.append(f'llvm{LLVM_BRANCHES[self.llvm_branch].version.major}')
a.append(self.halide_target())
a.append(self.buildsystem.name)
return '-'.join(a)
def builder_label(self):
if self.purpose == Purpose.llvm_nightly:
return self.llvm_builder_label()
else:
return self.halide_builder_label()
def builder_tags(self):
return self.builder_label().split('-')
def get_worker_names(self):
return [n for n, cfg in _WORKERS
if self.arch == cfg.arch and self.bits in cfg.bits and self.os == cfg.os]
def __str__(self):
return self.halide_target()
def get_builddir_subpath(subpath):
# Normalize paths to use forward slashes.
return Transform(lambda x: x.replace('\\', '/'), Interpolate(f'%(prop:builddir)s/{subpath}'))
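# For example, with builddir == r'C:\bb\halide', get_builddir_subpath('halide-build')
# renders to 'C:/bb/halide/halide-build' (backslashes normalized for CMake).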
def get_llvm_toolchains_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-toolchains', *subpaths))
# TODO: make private to the LLVM code
def get_llvm_source_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-project', *subpaths))
# TODO: make private to the LLVM code
def get_llvm_build_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-build', *subpaths))
def get_llvm_install_path(builder_type, *subpaths):
# Note that `builder_type.purpose` can be a Halide builder or an LLVM builder;
# we want to ignore that aspect and produce the same effective path
# regardless (ie, based only on the other aspects of the builder_type).
llvm_workdir = builder_type.llvm_builder_label()
return get_builddir_subpath(os.path.join('..', llvm_workdir, 'llvm-install', *subpaths))
def get_halide_source_path(*subpaths):
return get_builddir_subpath(os.path.join('halide-source', *subpaths))
def get_halide_build_path(*subpaths):
return get_builddir_subpath(os.path.join('halide-build', *subpaths))
def get_halide_install_path(builder_type, *subpaths):
s = 'halide-install'
if builder_type.sanitizer:
s += '-' + builder_type.sanitizer
return get_builddir_subpath(os.path.join(s, *subpaths))
def add_get_halide_source_steps(factory, builder_type):
factory.addStep(GitHub(name='Get Halide source',
locks=[performance_lock.access('counting')],
codebase='halide',
workdir=get_halide_source_path(),
repourl='https://github.com/halide/Halide.git',
branch=HALIDE_BRANCHES[builder_type.halide_branch].ref,
mode='incremental'))
def add_get_llvm_source_steps(factory, builder_type):
factory.addStep(Git(name=f'Get LLVM {LLVM_BRANCHES[builder_type.llvm_branch].version.major}',
locks=[performance_lock.access('counting')],
codebase='llvm',
workdir=get_llvm_source_path(),
repourl='https://github.com/llvm/llvm-project.git',
branch=LLVM_BRANCHES[builder_type.llvm_branch].ref,
mode='incremental'))
# Always download the toolchains, even on platforms we don't need 'em
toolchains_dir = get_llvm_toolchains_path()
factory.addStep(MakeDirectory(name="Make CMake toolchain directory",
locks=[performance_lock.access('counting')],
dir=toolchains_dir,
haltOnFailure=False))
factory.addStep(FileDownload(name='Download CMake toolchains',
mastersrc='toolchain.linux-arm32.cmake', # relative to base dir
workerdest='toolchain.linux-arm32.cmake', # relative to workdir
workdir=toolchains_dir,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
mode=0o644))
# Determined by running `set` in cmd.exe before and after vcvarsall.bat
# and diffing the output. It's likely that we don't need all of these
# to make things work, but I haven't bothered to figure out what is irrelevant,
# so I'm erring on the side of maybe too much.
# noinspection SpellCheckingInspection
VCVARSALL_ENV_VARS = [
"COMMANDPROMPTTYPE",
"DEVENVDIR",
"EXTENSIONSDKDIR",
"FRAMEWORK40VERSION",
"FRAMEWORKDIR",
"FRAMEWORKDIR64",
"FRAMEWORKVERSION",
"FRAMEWORKVERSION64",
"INCLUDE",
"LIB",
"LIBPATH",
"NETFXSDKDIR",
"PATH",
"PLATFORM",
"UCRTVERSION",
"UNIVERSALCRTSDKDIR",
"VCIDEINSTALLDIR",
"VCINSTALLDIR",
"VCTOOLSINSTALLDIR",
"VCTOOLSREDISTDIR",
"VCTOOLSVERSION",
"VISUALSTUDIOVERSION",
"VS110COMNTOOLS",
"VS120COMNTOOLS",
"VS140COMNTOOLS",
"VS160COMNTOOLS",
"VSCMD_ARG_APP_PLAT",
"VSCMD_ARG_HOST_ARCH",
"VSCMD_ARG_TGT_ARCH",
"VSCMD_VER",
"VSINSTALLDIR",
"WINDOWSLIBPATH",
"WINDOWSSDKBINPATH",
"WINDOWSSDKDIR",
"WINDOWSSDKLIBVERSION",
"WINDOWSSDKVERBINPATH",
"WINDOWSSDKVERSION",
"WINDOWSSDK_EXECUTABLEPATH_X64",
"WINDOWSSDK_EXECUTABLEPATH_X86",
"__DOTNET_ADD_64BIT",
"__DOTNET_PREFERRED_BITNESS",
"__VSCMD_PREINIT_PATH",
"__VSCMD_SCRIPT_ERR_COUNT",
]
def get_msvc_config_steps(factory, builder_type):
# ensure that we use the x64 host compiler, not the x86 host compiler
arch_for_bits = {32: 'x64_x86', 64: 'x64'}
vcvarsall = 'vcvarsall.bat %s && set' % arch_for_bits[builder_type.bits]
# TODO: surely there is a better way of locating vcvarsall
# vcvarsdir = "c:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build"
vcvarsdir = "C:/Program Files/Microsoft Visual Studio/2022/Community/VC/Auxiliary/Build"
# `vsvarsall && set` dumps all the settings to stdout;
# we'll extract & save just the subset we think are likely to be relevant.
def save_interesting_env_vars(rc, stdout, stderr):
d = {}
for line in stdout.split('\n'):
match = re.match("^([a-zA-Z0-9_-]+)=(.*)$", line.strip())
if match:
key = match.group(1).upper()
value = match.group(2)
if key in VCVARSALL_ENV_VARS:
d[key] = value
return {'env': d}
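# For example, a stdout line like 'VCToolsVersion=14.36.32532' (value
# hypothetical) is kept as {'VCTOOLSVERSION': '14.36.32532'}, since keys
# are upper-cased before the VCVARSALL_ENV_VARS membership test.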
factory.addStep(
SetPropertyFromCommand(name='Run VcVarsAll',
description='Run VcVarsAll',
workdir=vcvarsdir,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
command=vcvarsall,
extract_fn=save_interesting_env_vars))
def merge_renderable(_base, _extn):
@renderer
@defer.inlineCallbacks
def render(props):
base = yield props.render(_base)
extn = yield props.render(_extn)
base.update(extn)
return base
return render
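# Usage sketch (hypothetical): overlay one extra variable on the stored
# environment for a single step, without mutating the 'env' property:
#   env=merge_renderable(Property('env'), {'CTEST_OUTPUT_ON_FAILURE': '1'})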
def get_distrib_name(_version, target, ext):
@renderer
@defer.inlineCallbacks
def render(props):
rev = props.getProperty('got_revision')['halide']
version = yield props.render(_version)
return os.path.join(ARTIFACTS_DIR, f'Halide-{version}-{target}-{rev}.{ext}')
return render
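# Rendered example (values hypothetical): version '16.0.6', target
# 'x86-64-linux', revision 'abc1234', and ext 'tar.gz' yield
# <ARTIFACTS_DIR>/Halide-16.0.6-x86-64-linux-abc1234.tar.gz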
def get_cmake_generator(builder_type):
return 'Ninja'
def get_llvm_cmake_options(builder_type):
options = []
return options
def get_halide_cmake_options(builder_type, build_dir):
options = []
if builder_type.sanitizer and builder_type.purpose != Purpose.llvm_nightly:
assert builder_type.handles_sanitizers()
options.append("--preset=%s" % builder_type.sanitizer_preset())
# append *after* preset so we override the build dir
options += ['-B', build_dir]
return options
def get_ctest_options(builder_type, build_dir):
assert builder_type.purpose != Purpose.llvm_nightly
if builder_type.sanitizer:
assert builder_type.handles_sanitizers()
# No, this won't work, see https://gitlab.kitware.com/cmake/cmake/-/issues/23982 --
# fortunately, we don't need to specify the current sanitizer toolchains
# at test time (just at configure time).
# return {'preset': builder_type.sanitizer_preset(), 'test_dir': build_dir}
return {'build_config': builder_type.sanitizer_preset()}
else:
return {'build_config': 'Release'}
def get_halide_cmake_definitions(builder_type, halide_target='host', wasm_jit='wabt'):
cmake_definitions = {
'Clang_DIR': get_llvm_install_path(builder_type, 'lib/cmake/clang'),
'CMAKE_INSTALL_PREFIX': get_halide_install_path(builder_type),
'Halide_TARGET': halide_target,
'LLD_DIR': get_llvm_install_path(builder_type, 'lib/cmake/lld'),
'LLVM_DIR': get_llvm_install_path(builder_type, 'lib/cmake/llvm'),
'LLVM_ROOT': get_llvm_install_path(builder_type),
'WITH_PYTHON_BINDINGS': 'ON' if builder_type.handles_python() else 'OFF',
'WITH_TEST_FUZZ': 'ON' if builder_type.sanitizer == 'fuzzer' else 'OFF'
}
if builder_type.sanitizer and builder_type.handles_sanitizers():
pass
else:
cmake_definitions['CMAKE_BUILD_TYPE'] = 'Release'
# Sanitizer builds intermittently fail when using CCache for reasons that aren't
# clear ("precompiled header modified") -- for now, just ignore CCache for them
if builder_type.has_ccache() and not builder_type.sanitizer_preset():
cmake_definitions['Halide_CCACHE_BUILD'] = 'ON'
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
# Halide always uses its own toolchain files, from the cmake/ subdir.
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = get_halide_source_path('cmake', 'toolchain.linux-arm32.cmake')
if builder_type.os == 'windows':
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = Interpolate('%(prop:VCPKG_ROOT)s/scripts/buildsystems/vcpkg.cmake')
# CMake on Windows can't reliably find our pip-installed PyBind11 unless we set CMAKE_PREFIX_PATH to point to it
cmake_definitions['pybind11_DIR'] = Interpolate('%(prop:VIRTUAL_ENV)s/share/cmake/pybind11')
# Don't bother with anything Python-related if we are targeting WebAssembly.
if "wasm" in halide_target:
cmake_definitions['WITH_PYTHON_BINDINGS'] = 'OFF'
# TODO: HALIDE_NODE_JS_PATH is only necessary until EMSDK updates their built-in version of Node
# to v16.13+; when that is done, remove this definition.
if builder_type.handles_wasm() and halide_target.startswith("wasm-"):
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = Interpolate(
'%(prop:EMSDK)s/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake')
cmake_definitions['NODE_JS_EXECUTABLE'] = Property('HALIDE_NODE_JS_PATH')
if wasm_jit == 'v8':
cmake_definitions['WITH_WABT'] = 'OFF'
cmake_definitions['WITH_V8'] = 'ON'
cmake_definitions['V8_INCLUDE_PATH'] = '/home/halidenightly/v8/v8/include'
cmake_definitions['V8_LIB_PATH'] = \
'/home/halidenightly/v8/v8/out/x64.release.static/obj/libv8_monolith.a'
elif wasm_jit == 'wabt':
cmake_definitions['WITH_WABT'] = 'ON'
cmake_definitions['WITH_V8'] = 'OFF'
cmake_definitions['V8_INCLUDE_PATH'] = ''
cmake_definitions['V8_LIB_PATH'] = ''
else:
assert False, "Unknown wasm jit " + str(wasm_jit)
if builder_type.handles_webgpu() and "webgpu" in halide_target:
# TODO(srj): remove these after https://github.com/halide/Halide/pull/7422 lands
cmake_definitions['WEBGPU_NODE_BINDINGS'] = Property('HL_WEBGPU_NODE_BINDINGS')
cmake_definitions['WEBGPU_NATIVE_LIB'] = Property('HL_WEBGPU_NATIVE_LIB')
if builder_type.handles_hexagon() and 'hvx' in halide_target:
cmake_definitions['Halide_BUILD_HEXAGON_REMOTE_RUNTIME'] = 'ON'
return cmake_definitions
def get_cmake_build_command(builder_type, build_dir, targets=None):
cmd = ['ninja',
'-C', build_dir,
'-j', Property('WORKER_BUILD_PARALLELISM')]
# TODO(srj): for debugging apps/c_backend
if builder_type.os == "windows":
cmd.append('-v')
if targets:
cmd.extend(targets)
return cmd
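# For example, with WORKER_BUILD_PARALLELISM == 8 and targets=['install'],
# this renders to: ninja -C <build_dir> -j 8 install (plus '-v' on Windows).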
def get_llvm_cmake_definitions(builder_type):
# Keep sorted!
definitions = {
'CMAKE_BUILD_TYPE': 'Release',
'CMAKE_INSTALL_PREFIX': get_llvm_install_path(builder_type),
'LLVM_BUILD_32_BITS': ('ON' if builder_type.bits == 32 else 'OFF'),
'LLVM_ENABLE_ASSERTIONS': 'ON',
'LLVM_ENABLE_BINDINGS': 'OFF',
'LLVM_ENABLE_CURL': 'OFF',
'LLVM_ENABLE_DIA_SDK': 'OFF',
'LLVM_ENABLE_HTTPLIB': 'OFF',
'LLVM_ENABLE_IDE': 'OFF',
'LLVM_ENABLE_LIBXML2': 'OFF',
'LLVM_ENABLE_OCAMLDOC': 'OFF',
'LLVM_ENABLE_RTTI': 'ON',
'LLVM_ENABLE_TERMINFO': 'OFF',
'LLVM_ENABLE_WARNINGS': 'OFF', # silence them, it's not like we're gonna fix them
'LLVM_ENABLE_ZLIB': 'ON',
'LLVM_ENABLE_ZSTD': 'OFF',
'LLVM_INCLUDE_BENCHMARKS': 'OFF',
'LLVM_INCLUDE_EXAMPLES': 'OFF',
'LLVM_INCLUDE_TESTS': 'OFF',
'LLVM_TARGETS_TO_BUILD': 'X86;ARM;NVPTX;AArch64;Hexagon;PowerPC;WebAssembly',
}
if builder_type.bits == 32:
definitions['CMAKE_FIND_ROOT_PATH_MODE_INCLUDE'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_LIBRARY'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_PACKAGE'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_PROGRAM'] = "NEVER"
if builder_type.handles_riscv():
definitions['LLVM_TARGETS_TO_BUILD'] += ";RISCV"
if builder_type.handles_sanitizers():
definitions['LLVM_ENABLE_RUNTIMES'] = "compiler-rt;libcxx;libcxxabi;libunwind"
# We only need clang-tools-extra if building for sanitizers -- skip them
# if the builder will never do this, to save time & space.
definitions['LLVM_ENABLE_PROJECTS'] = "clang;lld;clang-tools-extra"
else:
definitions['LLVM_ENABLE_PROJECTS'] = "clang;lld"
# Some versions of GCC will flood the output with useless warnings about
# "parameter passing for argument of type foo changed in GCC 7.1" unless
# we disable this warning. This isn't *essential*, but it makes looking at the
# LLVM build logs much less noisy.
if builder_type.os != 'windows':
definitions['CMAKE_CXX_FLAGS'] = '-Wno-psabi'
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
# LLVM doesn't provide a toolchain file, and we can't/don't-want-to rely on the
# one from Halide, so we'll rely on one that the buildbot downloads to each worker.
# (Note that this assumes the file has been properly downloaded.)
definitions['CMAKE_TOOLCHAIN_FILE'] = get_llvm_toolchains_path('toolchain.linux-arm32.cmake')
definitions['LLVM_TARGET_ARCH'] = 'ARM'
definitions['LLVM_DEFAULT_TARGET_TRIPLE'] = 'arm-linux-gnueabihf'
if builder_type.arch == 'x86' and builder_type.bits == 32 and builder_type.os == 'linux':
definitions['CMAKE_FIND_ROOT_PATH'] = '/usr/lib/i386-linux-gnu'
definitions['CMAKE_FIND_ROOT_PATH_MODE_LIBRARY'] = 'ONLY'
# This disables an XCode setting that can get enabled by default
# when assertions are enabled, but only if your XCode install has
# certain frameworks installed; we want it disabled, as it prevents
# prebuilt libraries from working properly with XCode 9.x.
if builder_type.os == 'osx':
definitions['LLVM_ENABLE_SUPPORT_XCODE_SIGNPOSTS'] = 'FORCE_OFF'
# We never build LLVM with sanitizers enabled
if builder_type.has_ccache():
definitions['LLVM_CCACHE_BUILD'] = 'ON'
return definitions
def extend_property(dict_name, **kwargs):
@renderer
def render(props):
table = props.getProperty(dict_name, default={})
table.update(kwargs)
return table
return render
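# Usage sketch (values hypothetical): extend_property('env', PATH='/opt/bin')
# renders the stored 'env' dict (or {} if unset) with PATH overridden;
# add_env_setup_step below uses this to layer per-builder variables onto
# the worker environment.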
def add_env_setup_step(factory, builder_type, enable_ccache=False):
if builder_type.os == 'windows':
# do this first because the SetPropertyFromCommand step isn't smart enough to merge
get_msvc_config_steps(factory, builder_type)
cxx = 'c++'
cc = 'cc'
ld = 'ld'
if builder_type.os == 'linux':
cc = 'gcc-9'
cxx = 'g++-9'
ld = 'ld'
if builder_type.arch == 'x86' and builder_type.bits == 32:
cxx += ' -m32'
cc += ' -m32'
ld += ' -melf_i386'
elif builder_type.os == 'windows':
cxx = 'cl.exe'
cc = 'cl.exe'
# This is only necessary (or desirable) for make-based builds of Halide;
# CMake-based builds handle it via Halide_CCACHE_BUILD and/or LLVM_CCACHE_BUILD
if enable_ccache and builder_type.has_ccache():
cxx = 'ccache ' + cxx
cc = 'ccache ' + cc
env = {
'CC': cc,
'CXX': cxx,
'LD': ld,
}
# TODO: HALIDE_NODE_JS_PATH is only necessary until EMSDK updates their built-in version of Node
# to v16.13+; when that is done, remove HALIDE_NODE_JS_PATH here and on the workers.
factory.addStep(SetPropertiesFromEnv(name='Read worker environment',
variables=['EMSDK',
'HALIDE_NODE_JS_PATH',
'HL_HEXAGON_TOOLS',
'HL_WEBGPU_NATIVE_LIB',
'HL_WEBGPU_NODE_BINDINGS',
'LD_LIBRARY_PATH',
'VIRTUAL_ENV',
'VCPKG_ROOT']))
vcpkg_root = Property('VCPKG_ROOT', default=None)
if builder_type.handles_hexagon():
# Environment variables for testing Hexagon DSP
hexagon_remote_bin = get_halide_build_path('src', 'runtime', 'hexagon_remote')
# Assume that HL_HEXAGON_TOOLS points to the correct directory (it might not be /usr/local/hexagon)
env['HL_HEXAGON_SIM_REMOTE'] = Transform(os.path.join,
hexagon_remote_bin,
'hexagon',
'bin',
'hexagon_sim_remote')
env['HL_HEXAGON_SIM_CYCLES'] = '1'
env['LD_LIBRARY_PATH'] = [
# no, this will cause a failure at runtime if LD_LIBRARY_PATH is unset (or empty!)
# Property('LD_LIBRARY_PATH'),
hexagon_remote_bin,
Interpolate('%(prop:HL_HEXAGON_TOOLS)s/lib/iss'),
]
env['HEXAGON_SDK_ROOT'] = Interpolate('%(prop:HL_HEXAGON_TOOLS)s/../../../..')
# Force Vulkan validation layer on to catch any driver related errors
# ... this enables a suite of diagnostic checks implemented in the Vulkan SDK
# that verifies the driver and application conform to the Vulkan runtime
# specification. This should not be enabled in production due to the overhead,
# but we want to catch any changes in driver behaviour and/or spurious errors that
# may be hard to find (but easy to fix if the right error messages are present)
if builder_type.has_nvidia() and builder_type.handles_vulkan():
env['VK_INSTANCE_LAYERS'] = "VK_LAYER_KHRONOS_validation"
if builder_type.os == 'osx':
# Environment variable for turning on Metal API validation
# This will have no effect on CPU testing, just Metal testing
env['METAL_DEVICE_WRAPPER_TYPE'] = '1'
if builder_type.os == 'windows':
vcpkg_root = Property('VCPKG_ROOT', default='C:/vcpkg')
env['VCPKG_ROOT'] = vcpkg_root
# Current NVidia drivers on our Windows buildbots can corrupt their own
# cache, leading to many spurious failures. Disable the cache
# for now, pending NVidia investigation.
env['CUDA_CACHE_DISABLE'] = '1'
# We don't ever want an Abort, Retry, Ignore dialog in our tests
env['HL_DISABLE_WINDOWS_ABORT_DIALOG'] = '1'
# Leaving this here (but commented out) in case we need to temporarily
# disable leak-checking in the future.
#
# if builder_type.handles_sanitizers():
# # Disable leak-checking (for now) for ASAN builds
# env['ASAN_OPTIONS'] = 'detect_leaks=0'
factory.addStep(SetProperties(
name='Initialize environment',
properties=dict(
env=extend_property('env', **env),
VCPKG_ROOT=vcpkg_root)))
@renderer
def get_llvm_latest_commit(props):
# Note that this property is a dict for multi-codebase builds,
# but just a string for single-codebase builds.
build_dir = props.getProperty('builddir')
assert not isinstance(build_dir, dict)
build_dir = build_dir.replace('\\', '/')
# Can't use got_revision here since we may be using git directly.
return "cd %s/llvm-project && git log -1 > %s/llvm-install/llvm_latest_commit.txt" % (build_dir, build_dir)
def add_llvm_steps(factory, builder_type, clean_rebuild):
build_dir = get_llvm_build_path()
install_dir = get_llvm_install_path(builder_type)
llvm_name = str(LLVM_BRANCHES[builder_type.llvm_branch].version.major)
if clean_rebuild:
factory.addStep(RemoveDirectory(name="Remove LLVM %s Build Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(RemoveDirectory(name="Remove LLVM %s Install Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make LLVM %s Build Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make LLVM %s Install Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(
CMake(name='Configure LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
env=Property('env'),
workdir=build_dir,
path=get_llvm_source_path('llvm'),
generator=get_cmake_generator(builder_type),
definitions=get_llvm_cmake_definitions(builder_type),
options=get_llvm_cmake_options(builder_type)))
factory.addStep(
ShellCommand(name='Build LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=Property('env'),
command=get_cmake_build_command(builder_type, build_dir, targets=['install'])))
    # Save the SHA of LLVM's head rev into ${INSTALL}/llvm_latest_commit.txt,
    # just to make debugging simpler
    #
factory.addStep(
ShellCommand(name='Stamp Install Directory for LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=get_llvm_source_path(),
env=Property('env'),
command=get_llvm_latest_commit))
def add_halide_cmake_build_steps(factory, builder_type):
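    """Append the steps that configure and build Halide via CMake, always
    starting from fresh build and install directories."""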
# Always do a clean build for Halide
source_dir = get_halide_source_path()
build_dir = get_halide_build_path()
install_dir = get_halide_install_path(builder_type)
factory.addStep(RemoveDirectory(name="Remove Halide Build Dir",
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make Halide Build Dir",
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(RemoveDirectory(name="Remove Halide Install Dir",
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make Halide Install Dir",
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(CMake(name='Configure Halide',
description='Configure Halide',
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=Property('env'),
path=source_dir,
generator=get_cmake_generator(builder_type),
definitions=get_halide_cmake_definitions(builder_type),
options=get_halide_cmake_options(builder_type, build_dir)))
    factory.addStep(
        ShellCommand(name='Build Halide',
                     description='Build Halide',
                     # Build and install in one step, mirroring the LLVM build step above.
                     locks=[performance_lock.access('counting')],
                     haltOnFailure=True,
                     workdir=build_dir,
                     env=Property('env'),
                     command=get_cmake_build_command(builder_type, build_dir, targets=['install'])))
# coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
"""Constants."""
UNK_TOKEN = '<unk>'
BOS_TOKEN = '<bos>'
EOS_TOKEN = '<eos>'
PAD_TOKEN = '<pad>'
UNK_IDX = 0 # This should not be changed as long as serialized token
# embeddings redistributed on S3 contain an unknown token.
# Blame this code change and see commit for more context.
LARGE_POSITIVE_FLOAT = 1e18
LARGE_NEGATIVE_FLOAT = -LARGE_POSITIVE_FLOAT
GLOVE_NPZ_SHA1 = \
{'glove.42B.300d': ('glove.42B.300d.npz',
'7deee8f4860744db53ed9e50892effe9883e6d89'),
'glove.6B.100d': ('glove.6B.100d.npz',
'01f80f202fcabcc3e0804898349087bfc191dd1c'),
'glove.6B.200d': ('glove.6B.200d.npz',
'5e6e2bdab346c257f88d80d215d518e680d86e32'),
'glove.6B.300d': ('glove.6B.300d.npz',
'1db264aa936be62f055dfb72854204450bdf4399'),
'glove.6B.50d': ('glove.6B.50d.npz',
'aa16be8d184399d2199f83fd62586f2c30497bfa'),
'glove.840B.300d': ('glove.840B.300d.npz',
'b4ba390c1154736e07c0e67d9180935f5930e83c'),
'glove.twitter.27B.100d': ('glove.twitter.27B.100d.npz',
'0f7b82c223451d0002f79ba23596983cdbe0e2b1'),
'glove.twitter.27B.200d': ('glove.twitter.27B.200d.npz',
'41cc2d26f58a54622ce96bf6c8434360ab524f20'),
'glove.twitter.27B.25d': ('glove.twitter.27B.25d.npz',
'9f563d2f296995598cc46812b2fda05ad4c3c879'),
'glove.twitter.27B.50d': ('glove.twitter.27B.50d.npz',
'ce9959c056f2a0a780c468feeb4f823af51630e9')}
FAST_TEXT_NPZ_SHA1 = \
{'crawl-300d-2M': ('crawl-300d-2M.npz',
'9dd611a1fe280c63050cd546d3595400fc0eede4'),
'wiki.aa': ('wiki.aa.npz',
'48f163b80eb37f1806142169d3d4c05cf75b7339'),
'wiki.ab': ('wiki.ab.npz',
'860ceff119dd27e5b701b605879037c1310cbc3e'),
'wiki.ace': ('wiki.ace.npz',
'62938287464040491719f56a6f521f8f808beee8'),
'wiki.ady': ('wiki.ady.npz',
'646843afa260d018ed711df3f1ca9c3e000447b6'),
'wiki.af': ('wiki.af.npz',
'7b14cd27690b67fea318d0bac2283c16430680e2'),
'wiki.ak': ('wiki.ak.npz',
'20f309adad1c45958c97b6055d5838e05bbaea72'),
'wiki.als': ('wiki.als.npz',
'a8b03aa133c4f7da12fc27c2b167b7918b1e9805'),
'wiki.am': ('wiki.am.npz',
'ed3dd10cea64737f7a1623612ee099df9dc19f66'),
'wiki.ang': ('wiki.ang.npz',
'8efe64706d9d6b8eae38b2c7ff0b277e20592bc7'),
'wiki.an': ('wiki.an.npz',
'168046283c719ab96a29b1abae2e25a6575c7be8'),
'wiki.arc': ('wiki.arc.npz',
'049021b7decea4bc009b12936e56b4dbf5b760e7'),
'wiki.ar': ('wiki.ar.npz',
'7e325e1e98dfcdc9368d2ebe40ee834a2ed44912'),
'wiki.arz': ('wiki.arz.npz',
'7d851c2c7be3ee6f7fd896de7b76ea08e3fb08b0'),
'wiki.as': ('wiki.as.npz',
'01d38c29cd4bd99c1a8534abc058822da14a5b9c'),
'wiki.ast': ('wiki.ast.npz',
'9c9846ba5084505a0adea89c95c66e04efbf5ce9'),
'wiki.av': ('wiki.av.npz',
'7ef6a920c364638504e673cfde5f7675503fa81e'),
'wiki.ay': ('wiki.ay.npz',
'c1202e110930e3902397f5cb64a8359e013b469f'),
'wiki.azb': ('wiki.azb.npz',
'10351b7ef14ec2cb610d290cb6a3f6987ef5d8b3'),
'wiki.az': ('wiki.az.npz',
'74257c3bcd533a606afae509ea835dc036d61546'),
'wiki.ba': ('wiki.ba.npz',
'4a2857ed694d66864df562b376c2fa12fcb03646'),
'wiki.bar': ('wiki.bar.npz',
'e65c6b7e9ff83798d1eea05d166148837d53e615'),
'wiki.bat_smg': ('wiki.bat_smg.npz',
'6420584ae28ba6c9dd145fea8f096243d457c2d8'),
'wiki.bcl': ('wiki.bcl.npz',
'33606c970ab336b678393e2bdb8af2116d11cf7b'),
'wiki.be': ('wiki.be.npz',
'84487d341e333344cf71bc12c7a205d923762498'),
'wiki.bg': ('wiki.bg.npz',
'56f2a175b1a1d1a9cf9f1cea277cd0b46ffd7f66'),
'wiki.bh': ('wiki.bh.npz',
'07473989853a344a41aaa18f41030dc56d0d01c7'),
'wiki.bi': ('wiki.bi.npz',
'08adfa3c9ef3016d30ef69ea539d217ff67eda09'),
'wiki.bjn': ('wiki.bjn.npz',
'998a551283222931d3a26922308449950bfa3ec7'),
'wiki.bm': ('wiki.bm.npz',
'454ff9fbd4790e4a076d9a2087a51da28aa1332f'),
'wiki.bn': ('wiki.bn.npz',
'1f36f6f39c9a9b33bb8035c9a4dc7e04933604fd'),
'wiki.bo': ('wiki.bo.npz',
'b9fe87318428de0a7790de175b5fec80c5af482d'),
'wiki.bpy': ('wiki.bpy.npz',
'5c7853173d27e2c018c24eca69de8d5f34511b0d'),
'wiki.br': ('wiki.br.npz',
'7aa66a2034fbfaa1d39e637385d48610238797c9'),
'wiki.bs': ('wiki.bs.npz',
'a019a4677677c2e9e4d899326b2b6c15ad6c011a'),
'wiki.bug': ('wiki.bug.npz',
'09ae3477941d7a99d1df494368d7efb0b2c18913'),
'wiki.bxr': ('wiki.bxr.npz',
'b832c691b8ddd95896c052d3d15e1f98d72068d5'),
'wiki.ca': ('wiki.ca.npz',
'391e0d4daad08649251274fa1cc2a5f49c7728b1'),
'wiki.cbk_zam': ('wiki.cbk_zam.npz',
'02e57a763bc9f9eadaba57953383dd12a0a78a37'),
'wiki.cdo': ('wiki.cdo.npz',
'd6e8f422327e8b2273f1f2662d793707ece6695d'),
'wiki.ceb': ('wiki.ceb.npz',
'23bc0bb9aeaa57dff35092766941a866de142aae'),
'wiki.ce': ('wiki.ce.npz',
'182b2a889256119a6d379d501c55c7621e5855db'),
'wiki.ch': ('wiki.ch.npz',
'82dd77512fcb463481f43c9cef3507e2baa90d7b'),
'wiki.cho': ('wiki.cho.npz',
'b0b620fc2442d1a6e2440e71a424861c80175f0c'),
'wiki.chr': ('wiki.chr.npz',
'3d62c6b95c5af46abd6234426ae760cca65d5bd0'),
'wiki.chy': ('wiki.chy.npz',
'34a28a22da79aebc100e3714b825c95c8d5f54a3'),
'wiki.ckb': ('wiki.ckb.npz',
'ad19461e4be583d08b7693ff5b1e9d590ed41add'),
'wiki.co': ('wiki.co.npz',
'fa60d9f0e79f1c7e15f381aef983a0f4f31c05a8'),
'wiki.crh': ('wiki.crh.npz',
'540270ba6edd9d7b2f7efca52b3b407524ac67d1'),
'wiki.cr': ('wiki.cr.npz',
'f06b77465a38ec960d7d5a7554b848c37e945c76'),
'wiki.csb': ('wiki.csb.npz',
'b8b28559cf2541341af98e2aa755856765bdeabf'),
'wiki.cs': ('wiki.cs.npz',
'19881e931fe06abf341450f00c342d364313e232'),
'wiki.cu': ('wiki.cu.npz',
'731e0d00abd53bc2a8eb6cf37f6ab883cff34e15'),
'wiki.cv': ('wiki.cv.npz',
'e60034fcffb7dfef7b236ddba1194c3aa20b7967'),
'wiki.cy': ('wiki.cy.npz',
'5a0fb967b5556f007c0d5065f951a3d3b1c1005a'),
'wiki.da': ('wiki.da.npz',
'd06258014ba2c7450bc2d55edfdf1731433e42e5'),
'wiki.de': ('wiki.de.npz',
'a21694dfd2af63bd7bb00f0b60b28e88bd1153f1'),
'wiki.diq': ('wiki.diq.npz',
'4f6c77a86b39834a7130419967759afd8cc26b84'),
'wiki.dsb': ('wiki.dsb.npz',
'e74f1d346a8db96987bff0c33ee5f886907c380a'),
'wiki.dv': ('wiki.dv.npz',
'5d6fe6f0eec2e7704121d5aba03b4edbb28af873'),
'wiki.dz': ('wiki.dz.npz',
'77c639d36d0355b2de5adead7996eae342b852a6'),
'wiki.ee': ('wiki.ee.npz',
'4b5a76127d57515d3e8a76787cdefde5856b754a'),
'wiki.el': ('wiki.el.npz',
'a00bcb97e7898931196a1c69f7a492e5b6202661'),
'wiki.eml': ('wiki.eml.npz',
'b475d626b3d97e7a68c02827fdc7900599e838c6'),
'wiki.en': ('wiki.en.npz',
'ad5ec6d49db6c6fe76b8e85ff05d34e5d0e1eb6a'),
'wiki.eo': ('wiki.eo.npz',
'18049b0010520d13e676f5a82e8bb90153d99003'),
'wiki.es': ('wiki.es.npz',
'a6d192ba7d82d762f8367e75ca951aad4d11e410'),
'wiki.et': ('wiki.et.npz',
'4beb7025cf88f1aa62d025b187f0cb09aee61858'),
'wiki.eu': ('wiki.eu.npz',
'5e1a8197e35f20a2476798bbb935b4c131289c4f'),
'wiki.ext': ('wiki.ext.npz',
'049b2d1b0a8b102b45907cf487cac30aa294e0a0'),
'wiki.fa': ('wiki.fa.npz',
'81ed274997c87ef87d73d25e166ca06272ce426f'),
'wiki.ff': ('wiki.ff.npz',
'4867dc74cd53ca0b0f769af4fa1ea420406b59bf'),
'wiki.fi': ('wiki.fi.npz',
'6d1291b854045179f8171ac7d62ede7d8ac159a2'),
'wiki.fiu_vro': ('wiki.fiu_vro.npz',
'dd87806d9dc8833fa0e21e35a50815ebdbaa6c8b'),
'wiki.fj': ('wiki.fj.npz',
'cf5c31b0a69276f5dd18ab738ed92444abaeb755'),
'wiki.fo': ('wiki.fo.npz',
'ffc19807d528af000861a94cfb8097bd686e14fc'),
'wiki.fr': ('wiki.fr.npz',
'8f06d5dbe3cf7214354fe9b2f6eca0ef7419f063'),
'wiki.frp': ('wiki.frp.npz',
'c8b200ae592478d3cd0bfaafcd7aa19de8a3bfe5'),
'wiki.frr': ('wiki.frr.npz',
'fa5e5c39ea2a45793c679eacea290a35e37405ea'),
'wiki.fur': ('wiki.fur.npz',
'a61a8940d059f25000e3fe23933e5ed0d37e65d3'),
'wiki.fy': ('wiki.fy.npz',
'46f9f41bdf6f4fb8e27a753290413d745465963b'),
'wiki.gag': ('wiki.gag.npz',
'49fb01230e6803544122d47ab7d3fe694d1444f2'),
'wiki.gan': ('wiki.gan.npz',
'716b7b26acc15975f30caf3c6effa111516fcca5'),
'wiki.ga': ('wiki.ga.npz',
'ea934bc1fdc1acf6caf9ac746c6c499251f1fdee'),
'wiki.gd': ('wiki.gd.npz',
'597017b5a32d933f194595d3656f858e37e70a62'),
'wiki.glk': ('wiki.glk.npz',
'91a5834658bc2d48714e8807ef24efb79567b4b5'),
'wiki.gl': ('wiki.gl.npz',
'2fa8e48d6ae1e9c9d542eb3f2156cf9e359e66c2'),
'wiki.gn': ('wiki.gn.npz',
'e359eef3928e1f1b5d8fcf0ea532e8794c66289a'),
'wiki.gom': ('wiki.gom.npz',
'8cd361481c23f7545cc2bd8f1bf22aa7400edd4d'),
'wiki.got': ('wiki.got.npz',
'd05daf105611150695e61775fdff2c500b36be3f'),
'wiki.gu': ('wiki.gu.npz',
'0ce175c5fc39bab4032892f70c9d2bb850af0f4a'),
'wiki.gv': ('wiki.gv.npz',
'2c573f873d607831ff01b64603c17b8db79bd7e1'),
'wiki.hak': ('wiki.hak.npz',
'e6048727799cdf149f5c50037e0fc59300d33a94'),
'wiki.ha': ('wiki.ha.npz',
'f18ea7286bbd390c5470896b2c99cb1adc740064'),
'wiki.haw': ('wiki.haw.npz',
'18bcd85d2e06b1b889f0835fc5b62697fdf32d72'),
'wiki.he': ('wiki.he.npz',
'76915ff167b6ecb7b7e22ff0ca46914a55d344af'),
'wiki.hif': ('wiki.hif.npz',
'12153aaf98d76d5502ab77a27cd0b9a539f61513'),
'wiki.hi': ('wiki.hi.npz',
'249666a598991f6ec147954c6af9e531fd1cd94e'),
'wiki.ho': ('wiki.ho.npz',
'3f804fd69780c0789708b56ea9d48715f8e38f26'),
'wiki.hr': ('wiki.hr.npz',
'9a3de28e69f97048bfb480b4f83eaab6149f66ad'),
'wiki.hsb': ('wiki.hsb.npz',
'7070bf64e13299dd66ac0e9f8e24011a56b6bfe8'),
'wiki.ht': ('wiki.ht.npz',
'a607093d511afeb584d02dc676bc5a27eff66287'),
'wiki.hu': ('wiki.hu.npz',
'9b2c4750daf1bcf39768572e874b5afda0e2f0bc'),
'wiki.hy': ('wiki.hy.npz',
'ec0461a102a6fb00bd324f66cefd3c8d55a7093a'),
'wiki.hz': ('wiki.hz.npz',
'5dfb8afbdae6b4148c3e55ab459c56a74b46b463'),
'wiki.ia': ('wiki.ia.npz',
'4cfaaf053b9513bbf5b2423258c0f01d20256de6'),
'wiki.id': ('wiki.id.npz',
'bace396bb9941cc9e5b2e5f5a19be6db833c5fd4'),
'wiki.ie': ('wiki.ie.npz',
'1bae7256c2e763ce6d692d1c0a603d99a8b22826'),
'wiki.ig': ('wiki.ig.npz',
'23128e54a5e143891d392d621723bad9cfc8cf7b'),
'wiki.ii': ('wiki.ii.npz',
'54bc16d05da512481865a89ecf30260b0acc04dc'),
'wiki.ik': ('wiki.ik.npz',
'f8015227e893d2375699b7d132b306ba381f02ac'),
'wiki.ilo': ('wiki.ilo.npz',
'185a11f81bd5d24a34558dda81ee4735f5ba150b'),
'wiki.io': ('wiki.io.npz',
'ddf8180a90aa6ee5be93a2582cc99c535f21363e'),
'wiki.is': ('wiki.is.npz',
'968f8dd2a093b279a6f7aaa734008454bf51d724'),
'wiki.it': ('wiki.it.npz',
'fdfb857a309b2c3d29482bb5cc55f21b858d2e6f'),
'wiki.iu': ('wiki.iu.npz',
'fa8896730bd6c24c3473daa22116d1016294e7f7'),
'wiki.jam': ('wiki.jam.npz',
'a8f0d0b99c89ace0a6401b8fcda261d06065faaf'),
'wiki.ja': ('wiki.ja.npz',
'8d42e5a40e4d1d8645b2d80b873a65cadcf68b5c'),
'wiki.jbo': ('wiki.jbo.npz',
'145fc999ab004b348cf9bf445f0a93a7a145308b'),
'wiki.jv': ('wiki.jv.npz',
'66978770bf06e42414395cf5fd8c596044d72bec'),
'wiki.kaa': ('wiki.kaa.npz',
'624a640ecb9901b2aba2e9f44ab615146ecb2862'),
'wiki.kab': ('wiki.kab.npz',
'e97f93b6ba65e95c85b7541932cf53c5ad9eb896'),
'wiki.ka': ('wiki.ka.npz',
'1ca8376e1e0cbd58001c1b51a2d488a2874a6743'),
'wiki.kbd': ('wiki.kbd.npz',
'f2d2a05b06723ac549784ad5470d84f5742a1352'),
'wiki.kg': ('wiki.kg.npz',
'fa7f6d5f660a173a3e75342d449980eedcdc789e'),
'wiki.ki': ('wiki.ki.npz',
'21a8c7c616c0050c51c288861f3423f313e4f634'),
'wiki.kj': ('wiki.kj.npz',
'f3c347509a0d81f4f7fdbb8b22889b8d76e5014e'),
'wiki.kk': ('wiki.kk.npz',
'bc24a3289e1c1e18e16b6789c2f9f92af1e73071'),
'wiki.kl': ('wiki.kl.npz',
'b8b7e7359f067836e2be2ecfe9f35a820b00fe1d'),
'wiki.km': ('wiki.km.npz',
'e053799fd01463808432dc035bef3e36620e2f36'),
'wiki.kn': ('wiki.kn.npz',
'2849a0a8b3453e9bf6af05d4c7bd3db881dd1068'),
'wiki.koi': ('wiki.koi.npz',
'a9b02e9bd41833bcd54769f94626019c03f29997'),
'wiki.ko': ('wiki.ko.npz',
'764d9896e74b5a26c6884d48bce3bed8ed3a7822'),
'wiki.krc': ('wiki.krc.npz',
'bfe39598c718f1cc95909db7544b3214b308a97c'),
'wiki.kr': ('wiki.kr.npz',
'1e6af853d4a8ea7830e116eb9b61ac5d7d9a315c'),
'wiki.ksh': ('wiki.ksh.npz',
'66cd0e3e0a0b0282a13960571ebe7cddd7706bf2'),
'wiki.ks': ('wiki.ks.npz',
'85f1adaa05b854df4dede745a1aaab3836e60770'),
'wiki.ku': ('wiki.ku.npz',
'faf90584e5a45e6d0f9eeb88399b82abe037d584'),
'wiki.kv': ('wiki.kv.npz',
'9f2b41822013a412da9c99fac06eed8be03ca192'),
'wiki.kw': ('wiki.kw.npz',
'3eed8a8fc97a2fc79241b8474a458c98d00fc897'),
'wiki.ky': ('wiki.ky.npz',
'0116ff90f10a6c0728e1ea86d8a44896ea83270a'),
'wiki.lad': ('wiki.lad.npz',
'5af2015b3d1c5e8563f0e92721580988ebe2ce50'),
'wiki.la': ('wiki.la.npz',
'7143303a3ea13c7668eb90ea6e3d2ca69857a3be'),
'wiki.lbe': ('wiki.lbe.npz',
'f206a3c35a184ba5d2b32ee68640eadf66c847da'),
'wiki.lb': ('wiki.lb.npz',
'143dc6337f3690379282034c460c613d7f144923'),
'wiki.lez': ('wiki.lez.npz',
'b29a680decc6b29f24e8eb9e4f8e11e3419d45f1'),
'wiki.lg': ('wiki.lg.npz',
'866640ce62cedbc1d453b7ea3c289c291ad76e13'),
'wiki.lij': ('wiki.lij.npz',
'0dcd3d7009ae89b1016ca6cdb99a9f0d70bc4baf'),
'wiki.li': ('wiki.li.npz',
'4666b3c238256d7b7623a136db19b8b9f4754734'),
'wiki.lmo': ('wiki.lmo.npz',
'ac89fa7cfe0675950bcb31c66bf3f88a3cfc98f0'),
'wiki.ln': ('wiki.ln.npz',
'fba158719944aabe58e0002a90be0ed77e11702d'),
'wiki.lo': ('wiki.lo.npz',
'1e113e340a8a93d385e14502c9c4e3bcdf6c3101'),
'wiki.lrc': ('wiki.lrc.npz',
'42cb755f398fba6f0da7949c91e92b55654bd482'),
'wiki.ltg': ('wiki.ltg.npz',
'182f75859e228d1162215f28fe7f2dca127624a4'),
'wiki.lt': ('wiki.lt.npz',
'66aa944bd2e777cb82d6d59b1f2f837b6c48cb37'),
'wiki.lv': ('wiki.lv.npz',
'2be8f926da85694fa998bf79d80b61ebb8d67576'),
'wiki.mai': ('wiki.mai.npz',
'b8a9c36e2a0f1bb84a44dc762250d2a9007ef637'),
'wiki.map_bms': ('wiki.map_bms.npz',
'6f0394d6b3d08a946e3df4b9355efe94148f018a'),
'wiki.mdf': ('wiki.mdf.npz',
'774ee35334641db57f9ac9069961c5372a5d92e8'),
'wiki.mg': ('wiki.mg.npz',
'496c48ef668f08ce95ebb11ce1ce5026b52d935c'),
'wiki.mh': ('wiki.mh.npz',
'352edd84f99c5aa277a7306f6cacea1fab065ed3'),
'wiki.mhr': ('wiki.mhr.npz',
'dd78b27a674ac10411cdf74ac32f9391506b17e0'),
'wiki.min': ('wiki.min.npz',
'628b406441ab03bc8aa68195ada50bfdc8226f34'),
'wiki.mi': ('wiki.mi.npz',
'754127b473861cd4f9ae034c9f527a34827b1f00'),
'wiki.mk': ('wiki.mk.npz',
'b09fed4f56c296f13c4020ef1fec498382a38b73'),
'wiki.ml': ('wiki.ml.npz',
'02fb55d97ca2f0408f0e7e8dd6a661bbc3319a2a'),
'wiki.mn': ('wiki.mn.npz',
'08b2c45689aa5d9ec49df96dc7c777ce9b9a0b4b'),
'wiki.mo': ('wiki.mo.npz',
'638c2e8bd2352fd52921b9ae62f578b8357bab49'),
'wiki.mrj': ('wiki.mrj.npz',
'ec5cf1f4fb8dfdca64d8172974e620eb8fa41626'),
'wiki.mr': ('wiki.mr.npz',
'074dd68c947c2f137a3e84b55012925f00213139'),
'wiki.ms': ('wiki.ms.npz',
'3dbe9e9d70251de8a374776ff1250a9c3103ee59'),
'wiki.mt': ('wiki.mt.npz',
'f5103998a68d1b178387417436a83123d44aba01'),
'wiki.multi.ar': ('wiki.multi.ar.npz',
'a010d1d81a465c56ebaf596b3e8e8795e7f0f8e3'),
'wiki.multi.bg': ('wiki.multi.bg.npz',
'c04018f3a600cee170f12a36cdd35b4727a2aade'),
'wiki.multi.ca': ('wiki.multi.ca.npz',
'eef52a0cf20c133ca9065de25f0702861a8cfa29'),
'wiki.multi.cs': ('wiki.multi.cs.npz',
'c5f547aa78c0e3d7dae67a0334d500bf2a86aa30'),
'wiki.multi.da': ('wiki.multi.da.npz',
'24374f2ee169b33327feeee46da31b0de1622fe4'),
'wiki.multi.de': ('wiki.multi.de.npz',
'2e6c119b345bebd34b56eaaf855d6703889b11f7'),
'wiki.multi.el': ('wiki.multi.el.npz',
'9d122beedb80a2e5334946641e5bafd32c01e76b'),
'wiki.multi.en': ('wiki.multi.en.npz',
'8c3c480b4cb2690304173713a646280613b244a8'),
'wiki.multi.es': ('wiki.multi.es.npz',
'483a22656e4fb2a01e9f4ef8156b261e780850ab'),
'wiki.multi.et': ('wiki.multi.et.npz',
'22498c7b91645a3874fa738b5cfb16bf98b6f97c'),
'wiki.multi.fi': ('wiki.multi.fi.npz',
'765a6f0b63777bff4ae6ca2b461c5889c03d6a70'),
'wiki.multi.fr': ('wiki.multi.fr.npz',
'decd9aacf600114b8a36072535c0309874a37c83'),
'wiki.multi.he': ('wiki.multi.he.npz',
'7eee940c1b85936f59122f4b1a166223dd946674'),
'wiki.multi.hr': ('wiki.multi.hr.npz',
'1673963416af088f8bf15576afb33d58115db35c'),
'wiki.multi.hu': ('wiki.multi.hu.npz',
'a1fbe6ededf3cbaa3eaa22dd8b20cce4b36cfc6d'),
'wiki.multi.id': ('wiki.multi.id.npz',
'6c3e721febb511ede7db7bf978d65769e4270f5c'),
'wiki.multi.it': ('wiki.multi.it.npz',
'fc5bfc11e0165e8d95c1708573dad5e456826c73'),
'wiki.multi.mk': ('wiki.multi.mk.npz',
'6cd50198355674f156fc863108d9bebf11cfabd9'),
'wiki.multi.nl': ('wiki.multi.nl.npz',
'4fa06b9230c95dfa5a9e9a5d80f1f5ba614d3cbf'),
'wiki.multi.no': ('wiki.multi.no.npz',
'63756168c1101e73fba8d1a5015f32b8892819e6'),
'wiki.multi.pl': ('wiki.multi.pl.npz',
'958b8e8bead965ba1bb1433e1c960fc3e12a10fb'),
'wiki.multi.pt': ('wiki.multi.pt.npz',
'22f07df1609d79b95344ee575ea43141424a1528'),
'wiki.multi.ro': ('wiki.multi.ro.npz',
'73180b3e382519004bf38ea7b86237aacbbe813a'),
'wiki.multi.ru': ('wiki.multi.ru.npz',
'3b2eb9163f35e90bf2ce1cd3c997b354d0c34f59'),
'wiki.multi.sk': ('wiki.multi.sk.npz',
'606a0c3ba9849070c6b6b8c22d920fdeed9a1385'),
'wiki.multi.sl': ('wiki.multi.sl.npz',
'3cfdab5043b8cfe1535cb6dbd4c9e68847ad5904'),
'wiki.multi.sv': ('wiki.multi.sv.npz',
'4f1494885b9a831e87cfa3c15f2204c4a73c0779'),
'wiki.multi.tr': ('wiki.multi.tr.npz',
'54f90d5ddb9a65538a41e37c5a67ed933a5e4885'),
'wiki.multi.uk': ('wiki.multi.uk.npz',
'500fd26b1d7a25b42458012e99f9f76642e0c787'),
'wiki.multi.vi': ('wiki.multi.vi.npz',
'3955809cceb300965c15f9372221417719bb0db8'),
'wiki.mus': ('wiki.mus.npz',
'a5f48934a3fa6eaf4929098046c93fc94dd6bcb6'),
'wiki.mwl': ('wiki.mwl.npz',
'8a5e2c272166f8a72c5694ca6c3104d5f49179ec'),
'wiki.my': ('wiki.my.npz',
'5e035aca16700d7d6695af8a6d3a88ac847aaeb7'),
'wiki.myv': ('wiki.myv.npz',
'd4cfaab70c640033e02c0fc0c5a3615ae836c569'),
'wiki.mzn': ('wiki.mzn.npz',
'ad09ac584ae455b5862b95125ef409360ae18445'),
'wiki.nah': ('wiki.nah.npz',
'2dc454ef37d059f2053af46cfa1f4f0ca939cba0'),
'wiki.na': ('wiki.na.npz',
'401f0f880eb7aa78d21348bc1e0a3953b3e81bf0'),
'wiki.nap': ('wiki.nap.npz',
'996da46aeeab5644ba766d00c5e343b1553361d7'),
'wiki.nds_nl': ('wiki.nds_nl.npz',
'5a9307e16b13a5a82ec19a52b33254537e7198e7'),
'wiki.nds': ('wiki.nds.npz',
'b249a87c78c52becf51e7b50aaf9f9b6a36585f1'),
'wiki.ne': ('wiki.ne.npz',
'a601db2647a74ffd2b4b43dcb8584735f555459c'),
'wiki.new': ('wiki.new.npz',
'c398a3775aba9c68ce765cfdfb6b188f7c47e4c6'),
'wiki-news-300d-1M': ('wiki-news-300d-1M.npz',
'0a03bbd508e5381e140476140fb121afeb0050ed'),
'wiki-news-300d-1M-subword': ('wiki-news-300d-1M-subword.npz',
'69edae21375407781c727dcb9e534e79d712d137'),
'wiki.ng': ('wiki.ng.npz',
'befd774d15f69d43547e13e5ea3a97c4cb1ab405'),
'wiki.nl': ('wiki.nl.npz',
'5a7cb6f1dd0a7621202abba9461ac2c5bf905219'),
'wiki.nn': ('wiki.nn.npz',
'8e5059ddeb24050fadaa5cc4622b13feb3e4a226'),
'wiki.no': ('wiki.no.npz',
'5ce6e0f793e66f081652f64013968099de03d9f9'),
'wiki.nov': ('wiki.nov.npz',
'95ed23b4cfd7a65afa1c12c7dbdce6af53923d77'),
'wiki.vec': ('wiki.vec.npz',
'08ebb912efeb9df1c7d05e1af90484d210dff47e'),
'wiki.nrm': ('wiki.nrm.npz',
'e58614b4508ff9810f0b58fd818f973775bc918d'),
'wiki.nso': ('wiki.nso.npz',
'56a2ebe260241402d117cd89c5c872b9c96ff05b'),
'wiki.nv': ('wiki.nv.npz',
'c713051fe03ec1f60314bb42161b2a47fb5e169a'),
'wiki.ny': ('wiki.ny.npz',
'ba5a1725955cbc13e7fd93ab499f8085840c992c'),
'wiki.oc': ('wiki.oc.npz',
'259e7d994c38a4cfc140fb07016b82d6781e5027'),
'wiki.olo': ('wiki.olo.npz',
'0fea70f887def4779ee70a79366b88f1ada65004'),
'wiki.om': ('wiki.om.npz',
'47e2d756b5f8913085d901375c1b4e0b118a4221'),
'wiki.or': ('wiki.or.npz',
'7e274ab060219b019aa02bb97941cc6e162fd01f'),
'wiki.os': ('wiki.os.npz',
'19e8199cc2aaffdb07b6c558dbc5465ac6e03155'),
'wiki.pag': ('wiki.pag.npz',
'eddf4931547649026c02f893297ef673ec6158bb'),
'wiki.pam': ('wiki.pam.npz',
'40109aa174bd9f0fa657839bb548e2b0646c58d3'),
'wiki.pa': ('wiki.pa.npz',
'8a5870717e9e641b1f757f13259171698118de2e'),
'wiki.pap': ('wiki.pap.npz',
'999c8e5b005ca20d9998fbbe4fa79177f69e24c0'),
'wiki.pcd': ('wiki.pcd.npz',
'e975066b323a65cdc5e4c27138ef674d2cf7250b'),
'wiki.pdc': ('wiki.pdc.npz',
'5c770b9d56f276b0aa535845f175c05ee1cea615'),
'wiki.pfl': ('wiki.pfl.npz',
'0063d0b633ee529a75482b36ed4f4da7d64994ec'),
'wiki.pih': ('wiki.pih.npz',
import ambulance_game as abg
import numpy as np
import sympy as sym
from sympy.abc import a, b, c, d, e, f, g, h, i, j
def get_symbolic_pi(num_of_servers, threshold, system_capacity, buffer_capacity):
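    """Construct the symbolic steady-state equations for the Markov chain with
    the given parameters and solve them via LU decomposition, returning the
    stationary probabilities keyed by the placeholder symbols a, b, c, ....
    Only state spaces with at most 7 states are supported."""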
Q_sym = abg.markov.get_symbolic_transition_matrix(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
dimension = Q_sym.shape[0]
if dimension > 7:
return "Capacity of 6 exceeded"
M_sym = sym.Matrix([Q_sym.transpose()[:-1, :], sym.ones(1, dimension)])
b_sym = sym.Matrix([sym.zeros(dimension - 1, 1), [1]])
system = M_sym.col_insert(dimension, b_sym)
sol = sym.solve_linear_system_LU(system, [a, b, c, d, e, f, g])
return sol
def get_symbolic_state_probabilities_1222():
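    """Return the symbolic steady-state probabilities and the matrix of
    state-to-state recursive ratios for the model with 1 server, threshold 2,
    system capacity 2 and buffer capacity 2."""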
num_of_servers = 1
threshold = 2
system_capacity = 2
buffer_capacity = 2
sym_pi_1222 = get_symbolic_pi(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
all_states_1222 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1222 = [0 for _ in range(len(all_states_1222))]
sym_state_probs_1222[0] = sym.factor(sym_pi_1222[a]) # (0,0)
sym_state_probs_1222[1] = sym.factor(sym_pi_1222[b]) # (0,1)
    sym_state_probs_1222[2] = sym.factor(sym_pi_1222[c])  # (0,2)
    sym_state_probs_1222[3] = sym.factor(sym_pi_1222[d])  # (1,2)
    sym_state_probs_1222[4] = sym.factor(sym_pi_1222[e])  # (2,2)
sym_state_recursive_ratios_1222 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1222[0, 0] = 1
sym_state_recursive_ratios_1222[0, 1] = sym.factor(
sym_state_probs_1222[1] / sym_state_probs_1222[0]
) # (0,0) -> (0,1)
    sym_state_recursive_ratios_1222[0, 2] = sym.factor(
        sym_state_probs_1222[2] / sym_state_probs_1222[1]
    )  # (0,1) -> (0,2)
    sym_state_recursive_ratios_1222[1, 2] = sym.factor(
        sym_state_probs_1222[3] / sym_state_probs_1222[2]
    )  # (0,2) -> (1,2)
    sym_state_recursive_ratios_1222[2, 2] = sym.factor(
        sym_state_probs_1222[4] / sym_state_probs_1222[3]
    )  # (1,2) -> (2,2)
return sym_state_probs_1222, sym_state_recursive_ratios_1222
def get_symbolic_state_probabilities_1121():
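    """Return the symbolic steady-state probabilities and three recursive-ratio
    matrices (stepwise, rightward and relative to P(0,0)) for the model with
    1 server, threshold 1, system capacity 2 and buffer capacity 1."""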
num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 1
all_states_1121 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_pi_1121 = get_symbolic_pi(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1121 = [0 for _ in range(len(all_states_1121))]
sym_state_probs_1121[0] = sym.factor(sym_pi_1121[a]) # (0,0)
sym_state_probs_1121[1] = sym.factor(sym_pi_1121[b]) # (0,1)
sym_state_probs_1121[2] = sym.factor(sym_pi_1121[c]) # (1,1)
sym_state_probs_1121[3] = sym.factor(sym_pi_1121[d]) # (0,2)
sym_state_probs_1121[4] = sym.factor(sym_pi_1121[e]) # (1,2)
sym_state_recursive_ratios_1121 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1121[0, 0] = 1
sym_state_recursive_ratios_1121[0, 1] = sym.factor(
sym_state_probs_1121[1] / sym_state_probs_1121[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1121[1, 1] = sym.factor(
sym_state_probs_1121[2] / sym_state_probs_1121[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1121[0, 2] = sym.factor(
sym_state_probs_1121[3] / sym_state_probs_1121[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[3]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_right_1121 = sym_state_recursive_ratios_1121.copy()
sym_state_recursive_ratios_right_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_P0_1121 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1121[0, 0] = 1
sym_state_recursive_ratios_P0_1121[0, 1] = sym.factor(
sym_state_probs_1121[1] / sym_state_probs_1121[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1121[1, 1] = sym.factor(
sym_state_probs_1121[2] / sym_state_probs_1121[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1121[0, 2] = sym.factor(
sym_state_probs_1121[3] / sym_state_probs_1121[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[0]
) # (0,0) -> (1,2)
return (
sym_state_probs_1121,
sym_state_recursive_ratios_1121,
sym_state_recursive_ratios_right_1121,
sym_state_recursive_ratios_P0_1121,
)
def get_symbolic_state_probabilities_1122():
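    """Return closed-form symbolic steady-state probabilities (in Lambda,
    lambda_1, lambda_2 and mu) and the stepwise, rightward and P(0,0)-relative
    recursive-ratio matrices for the model with 1 server, threshold 1,
    system capacity 2 and buffer capacity 2."""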
# num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 2
all_states_1122 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1122 = [0 for _ in range(len(all_states_1122))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
sym_state_probs_1122[0] = (
(sym_mu**6)
+ 2 * (sym_lambda_2) * (sym_mu**5)
+ (sym_lambda_2**2) * (sym_mu**4)
) # (0,0)
sym_state_probs_1122[1] = (sym_Lambda * sym_mu**3) * (
sym_mu**2 + 2 * sym_mu * sym_lambda_2 + sym_lambda_2**2
) # (0,1)
sym_state_probs_1122[2] = (sym_Lambda * sym_lambda_2 * sym_mu**2) * (
sym_lambda_2**2
+ sym_lambda_2 * sym_lambda_1
+ sym_lambda_1 * sym_mu
+ sym_mu**2
+ 2 * sym_lambda_2 * sym_mu
) # (1,1)
sym_state_probs_1122[3] = (sym_Lambda * sym_lambda_2**2 * sym_mu) * (
sym_lambda_2**2
+ 2 * sym_lambda_1 * sym_lambda_2
+ 3 * sym_lambda_1 * sym_mu
+ sym_mu**2
+ 2 * sym_lambda_2 * sym_mu
+ sym_lambda_1**2
) # (2,1)
sym_state_probs_1122[4] = (sym_Lambda * sym_lambda_1 * sym_mu**3) * (
sym_lambda_2 + sym_mu
) # (0,2)
sym_state_probs_1122[5] = (
sym_Lambda * sym_lambda_1 * sym_lambda_2 * sym_mu**2
) * (
2 * sym_mu + sym_lambda_1 + sym_lambda_2
) # (1,2)
sym_state_probs_1122[6] = (sym_Lambda * sym_lambda_1 * sym_lambda_2**2) * (
sym_lambda_1**2
+ 4 * sym_lambda_1 * sym_mu
+ 2 * sym_lambda_1 * sym_lambda_2
+ 3 * sym_mu**2
+ sym_lambda_2**2
+ 3 * sym_lambda_2 * sym_mu
) # (2,2)
total_1122 = np.sum(sym_state_probs_1122)
sym_state_probs_1122 = [i / total_1122 for i in sym_state_probs_1122]
sym_state_recursive_ratios_1122 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1122[0, 0] = 1
sym_state_recursive_ratios_1122[0, 1] = sym.factor(
sym_state_probs_1122[1] / sym_state_probs_1122[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1122[1, 1] = sym.factor(
sym_state_probs_1122[2] / sym_state_probs_1122[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1122[2, 1] = sym.factor(
sym_state_probs_1122[3] / sym_state_probs_1122[2]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1122[0, 2] = sym.factor(
sym_state_probs_1122[4] / sym_state_probs_1122[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[4]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[5]
) # (1,2) -> (2,2)
sym_state_recursive_ratios_right_1122 = sym_state_recursive_ratios_1122.copy()
sym_state_recursive_ratios_right_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[3]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_P0_1122 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1122[0, 0] = 1
sym_state_recursive_ratios_P0_1122[0, 1] = sym.factor(
sym_state_probs_1122[1] / sym_state_probs_1122[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1122[1, 1] = sym.factor(
sym_state_probs_1122[2] / sym_state_probs_1122[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1122[2, 1] = sym.factor(
sym_state_probs_1122[3] / sym_state_probs_1122[0]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1122[0, 2] = sym.factor(
sym_state_probs_1122[4] / sym_state_probs_1122[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[0]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[0]
) # (0,0) -> (2,2)
return (
sym_state_probs_1122,
sym_state_recursive_ratios_1122,
sym_state_recursive_ratios_right_1122,
sym_state_recursive_ratios_P0_1122,
)
def get_symbolic_state_probabilities_1123():
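    """Solve the steady-state balance equations directly with sym.solve and
    return the state probabilities plus the stepwise, rightward and
    P(0,0)-relative recursive-ratio matrices for the model with 1 server,
    threshold 1, system capacity 2 and buffer capacity 3."""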
num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 3
Q_sym_1123 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p21, p31, p02, p12, p22, p32 = sym.symbols(
"p00, p01, p11, p21, p31, p02, p12, p22, p32"
)
pi_1123 = sym.Matrix([p00, p01, p11, p21, p31, p02, p12, p22, p32])
dimension_1123 = Q_sym_1123.shape[0]
M_sym_1123 = sym.Matrix(
[Q_sym_1123.transpose()[:-1, :], sym.ones(1, dimension_1123)]
)
sym_diff_equations_1123 = M_sym_1123 @ pi_1123
b_sym_1123 = sym.Matrix([sym.zeros(dimension_1123 - 1, 1), [1]])
eq0_1123 = sym.Eq(sym_diff_equations_1123[0], b_sym_1123[0])
eq1_1123 = sym.Eq(sym_diff_equations_1123[1], b_sym_1123[1])
eq2_1123 = sym.Eq(sym_diff_equations_1123[2], b_sym_1123[2])
eq3_1123 = sym.Eq(sym_diff_equations_1123[3], b_sym_1123[3])
eq4_1123 = sym.Eq(sym_diff_equations_1123[4], b_sym_1123[4])
eq5_1123 = sym.Eq(sym_diff_equations_1123[5], b_sym_1123[5])
eq6_1123 = sym.Eq(sym_diff_equations_1123[6], b_sym_1123[6])
eq7_1123 = sym.Eq(sym_diff_equations_1123[7], b_sym_1123[7])
eq8_1123 = sym.Eq(sym_diff_equations_1123[8], b_sym_1123[8])
sym_state_probs_1123 = sym.solve(
[
eq0_1123,
eq1_1123,
eq2_1123,
eq3_1123,
eq4_1123,
eq5_1123,
eq6_1123,
eq7_1123,
eq8_1123,
],
(p00, p01, p11, p21, p31, p02, p12, p22, p32),
)
sym_state_recursive_ratios_1123 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1123[0, 0] = 1
sym_state_recursive_ratios_1123[0, 1] = sym.factor(
sym_state_probs_1123[p01] / sym_state_probs_1123[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1123[1, 1] = sym.factor(
sym_state_probs_1123[p11] / sym_state_probs_1123[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1123[2, 1] = sym.factor(
sym_state_probs_1123[p21] / sym_state_probs_1123[p11]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1123[3, 1] = sym.factor(
sym_state_probs_1123[p31] / sym_state_probs_1123[p21]
) # (2,1) -> (3,1)
sym_state_recursive_ratios_1123[0, 2] = sym.factor(
sym_state_probs_1123[p02] / sym_state_probs_1123[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p12]
) # (1,2) -> (2,2)
    sym_state_recursive_ratios_1123[3, 2] = sym.factor(
        sym_state_probs_1123[p32] / sym_state_probs_1123[p22]
    )  # (2,2) -> (3,2)
sym_state_recursive_ratios_right_1123 = sym_state_recursive_ratios_1123.copy()
sym_state_recursive_ratios_right_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p21]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_right_1123[3, 2] = sym.factor(
sym_state_probs_1123[p32] / sym_state_probs_1123[p22]
) # (2,2) -> (3,2)
sym_state_recursive_ratios_P0_1123 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1123[0, 0] = 1
sym_state_recursive_ratios_P0_1123[0, 1] = sym.factor(
sym_state_probs_1123[p01] / sym_state_probs_1123[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1123[1, 1] = sym.factor(
sym_state_probs_1123[p11] / sym_state_probs_1123[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1123[2, 1] = sym.factor(
sym_state_probs_1123[p21] / sym_state_probs_1123[p00]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1123[3, 1] = sym.factor(
sym_state_probs_1123[p31] / sym_state_probs_1123[p00]
) # (0,0) -> (3,1)
sym_state_recursive_ratios_P0_1123[0, 2] = sym.factor(
sym_state_probs_1123[p02] / sym_state_probs_1123[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p00]
) # (0,0) -> (2,2)
sym_state_recursive_ratios_P0_1123[3, 2] = sym.factor(
sym_state_probs_1123[p32] / sym_state_probs_1123[p00]
) # (0,0) -> (3,2)
return (
sym_state_probs_1123,
sym_state_recursive_ratios_1123,
sym_state_recursive_ratios_right_1123,
sym_state_recursive_ratios_P0_1123,
)
def get_symbolic_state_probabilities_1341():
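    """Return closed-form symbolic steady-state probabilities and the stepwise,
    rightward and P(0,0)-relative recursive-ratio matrices for the model with
    1 server, threshold 3, system capacity 4 and buffer capacity 1."""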
# num_of_servers = 1
threshold = 3
system_capacity = 4
buffer_capacity = 1
all_states_1341 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1341 = [0 for _ in range(len(all_states_1341))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
sym_state_probs_1341[0] = (sym_lambda_2) * (sym_mu**5) + (sym_mu**6) # (0,0)
sym_state_probs_1341[1] = sym_Lambda * sym_lambda_2 * (sym_mu**4) + sym_Lambda * (
sym_mu**5
) # (0,1)
sym_state_probs_1341[2] = (sym_Lambda**2) * sym_lambda_2 * (sym_mu**3) + (
sym_Lambda**2
) * (
sym_mu**4
) # (0,2)
sym_state_probs_1341[3] = (sym_Lambda**3) * sym_lambda_2 * (sym_mu**2) + (
sym_Lambda**3
) * (
sym_mu**3
) # (0,3)
sym_state_probs_1341[4] = (
(sym_Lambda**3) * sym_lambda_1 * sym_lambda_2 * sym_mu
+ (sym_Lambda**3) * sym_lambda_2 * (sym_mu**2)
+ (sym_Lambda**3) * sym_lambda_2 * sym_lambda_2 * sym_mu
) # (1,3)
sym_state_probs_1341[5] = (sym_Lambda**3) * sym_lambda_1 * (sym_mu**2) # (0,4)
sym_state_probs_1341[6] = (
(sym_Lambda**3) * (sym_lambda_1**2) * sym_lambda_2
+ (sym_Lambda**3) * sym_lambda_1 * (sym_lambda_2**2)
+ 2 * (sym_Lambda**3) * sym_lambda_1 * sym_lambda_2 * sym_mu
) # (1,4)
total_1341 = np.sum(sym_state_probs_1341)
sym_state_probs_1341 = [i / total_1341 for i in sym_state_probs_1341]
sym_state_recursive_ratios_1341 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1341[0, 0] = 1
sym_state_recursive_ratios_1341[0, 1] = sym.factor(
sym_state_probs_1341[1] / sym_state_probs_1341[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1341[0, 2] = sym.factor(
sym_state_probs_1341[2] / sym_state_probs_1341[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1341[0, 3] = sym.factor(
sym_state_probs_1341[3] / sym_state_probs_1341[2]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1341[0, 4] = sym.factor(
sym_state_probs_1341[5] / sym_state_probs_1341[3]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1341[1, 3] = sym.factor(
sym_state_probs_1341[4] / sym_state_probs_1341[3]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[5]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_right_1341 = sym_state_recursive_ratios_1341.copy()
sym_state_recursive_ratios_right_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[4]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_P0_1341 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1341[0, 0] = 1
sym_state_recursive_ratios_P0_1341[0, 1] = sym.factor(
sym_state_probs_1341[1] / sym_state_probs_1341[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1341[0, 2] = sym.factor(
sym_state_probs_1341[2] / sym_state_probs_1341[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1341[0, 3] = sym.factor(
sym_state_probs_1341[3] / sym_state_probs_1341[0]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1341[1, 3] = sym.factor(
sym_state_probs_1341[4] / sym_state_probs_1341[0]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1341[0, 4] = sym.factor(
sym_state_probs_1341[5] / sym_state_probs_1341[0]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[0]
) # (0,0) -> (1,4)
return (
sym_state_probs_1341,
sym_state_recursive_ratios_1341,
sym_state_recursive_ratios_right_1341,
sym_state_recursive_ratios_P0_1341,
)
def get_symbolic_state_probabilities_1131():
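    """Return closed-form symbolic steady-state probabilities and the stepwise,
    rightward and P(0,0)-relative recursive-ratio matrices for the model with
    1 server, threshold 1, system capacity 3 and buffer capacity 1."""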
# num_of_servers = 1
threshold = 1
system_capacity = 3
buffer_capacity = 1
all_states_1131 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1131 = [0 for _ in range(len(all_states_1131))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
# (0,0)
sym_state_probs_1131[0] = (
(sym_mu**6)
+ 2 * (sym_lambda_2 * (sym_mu**5))
+ ((sym_lambda_2**2) * (sym_mu**4))
+ (sym_lambda_1 * sym_lambda_2 * (sym_mu**4))
)
# (0,1)
sym_state_probs_1131[1] = sym_state_probs_1131[0] * sym_Lambda / sym_mu
# (1,1)
sym_state_probs_1131[2] = (
(sym_Lambda * (sym_lambda_1**2) * sym_lambda_2 * (sym_mu**2))
+ (sym_Lambda * sym_lambda_2 * sym_lambda_1 * (sym_mu**3))
+ 2 * (sym_Lambda * sym_lambda_1 * (sym_lambda_2**2) * (sym_mu**2))
+ 2 * (sym_Lambda * (sym_lambda_2**2) * (sym_mu**3))
+ (sym_Lambda * (sym_lambda_2**3) * (sym_mu**2))
+ (sym_Lambda * sym_lambda_2 * (sym_mu**4))
)
# (0,2)
sym_state_probs_1131[3] = (
sym_Lambda * sym_lambda_1 * sym_mu**3 * (sym_lambda_2 + sym_mu)
)
# (1,2)
sym_state_probs_1131[4] = (sym_Lambda * sym_lambda_2 * sym_lambda_1 * sym_mu) * (
(sym_lambda_2**2)
+ 2 * sym_lambda_2 * sym_lambda_1
+ 3 * sym_lambda_2 * sym_mu
+ (sym_lambda_1**2)
+ 2 * sym_lambda_1 * sym_mu
+ 2 * (sym_mu**2)
)
# (0,3)
sym_state_probs_1131[5] = sym_Lambda * (sym_lambda_1**2) * (sym_mu**3)
# (1,3)
sym_state_probs_1131[6] = (sym_Lambda * sym_lambda_2 * (sym_lambda_1**2)) * (
(sym_lambda_2**2)
+ 2 * sym_lambda_2 * sym_lambda_1
+ 3 * sym_lambda_2 * sym_mu
+ (sym_lambda_1**2)
+ 2 * sym_lambda_1 * sym_mu
+ 3 * (sym_mu**2)
)
denominator = (
sym_Lambda * sym_lambda_2**3 * sym_lambda_1**2
+ sym_Lambda * sym_lambda_2**3 * sym_lambda_1 * sym_mu
+ sym_Lambda * sym_lambda_2**3 * sym_mu**2
+ 2 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1**3
+ 5 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1**2 * sym_mu
+ 5 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1 * sym_mu**2
+ 3 * sym_Lambda * sym_lambda_2**2 * sym_mu**3
+ sym_Lambda * sym_lambda_2 * sym_lambda_1**4
+ 3 * sym_Lambda * sym_lambda_2 * sym_lambda_1**3 * sym_mu
+ 6 * sym_Lambda * sym_lambda_2 * sym_lambda_1**2 * sym_mu**2
+ 5 * sym_Lambda * sym_lambda_2 * sym_lambda_1 * sym_mu**3
+ 3 * sym_Lambda * sym_lambda_2 * sym_mu**4
+ sym_Lambda * sym_lambda_1**2 * sym_mu**3
+ sym_Lambda * sym_lambda_1 * sym_mu**4
+ sym_Lambda * sym_mu**5
+ sym_lambda_2**2 * sym_mu**4
+ sym_lambda_2 * sym_lambda_1 * sym_mu**4
+ 2 * sym_lambda_2 * sym_mu**5
+ sym_mu**6
)
sym_state_probs_1131 = [i / denominator for i in sym_state_probs_1131]
sym_state_recursive_ratios_1131 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1131[0, 0] = 1
sym_state_recursive_ratios_1131[0, 1] = sym.factor(
sym_state_probs_1131[1] / sym_state_probs_1131[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1131[1, 1] = sym.factor(
sym_state_probs_1131[2] / sym_state_probs_1131[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1131[0, 2] = sym.factor(
sym_state_probs_1131[3] / sym_state_probs_1131[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[3]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1131[0, 3] = sym.factor(
sym_state_probs_1131[5] / sym_state_probs_1131[3]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[5]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_right_1131 = sym_state_recursive_ratios_1131.copy()
sym_state_recursive_ratios_right_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[4]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_P0_1131 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1131[0, 0] = 1
sym_state_recursive_ratios_P0_1131[0, 1] = sym.factor(
sym_state_probs_1131[1] / sym_state_probs_1131[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1131[1, 1] = sym.factor(
sym_state_probs_1131[2] / sym_state_probs_1131[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1131[0, 2] = sym.factor(
sym_state_probs_1131[3] / sym_state_probs_1131[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[0]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1131[0, 3] = sym.factor(
sym_state_probs_1131[5] / sym_state_probs_1131[0]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[0]
) # (0,0) -> (1,3)
return (
sym_state_probs_1131,
sym_state_recursive_ratios_1131,
sym_state_recursive_ratios_right_1131,
sym_state_recursive_ratios_P0_1131,
)
def get_symbolic_state_probabilities_1132():
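    """Solve the steady-state balance equations directly with sym.solve and
    return the state probabilities plus the recursive-ratio matrices for the
    model with 1 server, threshold 1, system capacity 3 and buffer capacity 2."""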
num_of_servers = 1
threshold = 1
system_capacity = 3
buffer_capacity = 2
Q_sym_1132 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p21, p02, p12, p22, p03, p13, p23 = sym.symbols(
"p00, p01, p11, p21, p02, p12, p22, p03, p13, p23"
)
pi_1132 = sym.Matrix([p00, p01, p11, p21, p02, p12, p22, p03, p13, p23])
dimension_1132 = Q_sym_1132.shape[0]
M_sym_1132 = sym.Matrix(
[Q_sym_1132.transpose()[:-1, :], sym.ones(1, dimension_1132)]
)
sym_diff_equations_1132 = M_sym_1132 @ pi_1132
b_sym_1132 = sym.Matrix([sym.zeros(dimension_1132 - 1, 1), [1]])
eq0_1132 = sym.Eq(sym_diff_equations_1132[0], b_sym_1132[0])
eq1_1132 = sym.Eq(sym_diff_equations_1132[1], b_sym_1132[1])
eq2_1132 = sym.Eq(sym_diff_equations_1132[2], b_sym_1132[2])
eq3_1132 = sym.Eq(sym_diff_equations_1132[3], b_sym_1132[3])
eq4_1132 = sym.Eq(sym_diff_equations_1132[4], b_sym_1132[4])
eq5_1132 = sym.Eq(sym_diff_equations_1132[5], b_sym_1132[5])
eq6_1132 = sym.Eq(sym_diff_equations_1132[6], b_sym_1132[6])
eq7_1132 = sym.Eq(sym_diff_equations_1132[7], b_sym_1132[7])
eq8_1132 = sym.Eq(sym_diff_equations_1132[8], b_sym_1132[8])
eq9_1132 = sym.Eq(sym_diff_equations_1132[9], b_sym_1132[9])
sym_state_probs_1132 = sym.solve(
[
eq0_1132,
eq1_1132,
eq2_1132,
eq3_1132,
eq4_1132,
eq5_1132,
eq6_1132,
eq7_1132,
eq8_1132,
eq9_1132,
],
(p00, p01, p11, p21, p02, p12, p22, p03, p13, p23),
)
sym_state_recursive_ratios_1132 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1132[0, 0] = 1
sym_state_recursive_ratios_1132[0, 1] = sym.factor(
sym_state_probs_1132[p01] / sym_state_probs_1132[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1132[1, 1] = sym.factor(
sym_state_probs_1132[p11] / sym_state_probs_1132[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1132[2, 1] = sym.factor(
sym_state_probs_1132[p21] / sym_state_probs_1132[p11]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1132[0, 2] = sym.factor(
sym_state_probs_1132[p02] / sym_state_probs_1132[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1132[1, 2] = sym.factor(
sym_state_probs_1132[p12] / sym_state_probs_1132[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1132[2, 2] = sym.factor(
sym_state_probs_1132[p22] / sym_state_probs_1132[p12]
) # (1,2) -> (2,2)
sym_state_recursive_ratios_1132[0, 3] = sym.factor(
sym_state_probs_1132[p03] / sym_state_probs_1132[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1132[1, 3] = sym.factor(
sym_state_probs_1132[p13] / sym_state_probs_1132[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1132[2, 3] = sym.factor(
sym_state_probs_1132[p23] / sym_state_probs_1132[p13]
) # (1,3) -> (2,3)
sym_state_recursive_ratios_right_1132 = sym_state_recursive_ratios_1132.copy()
sym_state_recursive_ratios_right_1132[1, 2] = sym.factor(
sym_state_probs_1132[p12] / sym_state_probs_1132[p11]
    )  # (1,1) -> (1,2)
#!/software/python-2.7-2014q3-el6-x86_64/bin/python
import SNANA_Reader as simread
import REAL_Reader as dataread
#import astropy.cosmology as cosmo
import traceback
import scipy
import scipy.stats as stats
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
#import Cosmology
import scipy.stats.mstats as mstats
import scipy.stats as stats
from scipy.interpolate import UnivariateSpline
from sys import argv
import glob
import time
import os
import gzip
import shutil
import numpy.ma as ma
import subprocess
import iminuit as iM
from iminuit import Minuit as M
from discreteChi2Func import discreteChi2Func as chi2func
import pandas as pd
class Rate_Fitter:
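    """Fit a supernova rate model (e.g. 'powerlaw') by comparing a real (or
    mock) data catalog against SNANA simulations, applying analysis cuts,
    redshift-range cuts, an optional Hubble-residual (MURES) cut and optional
    rate priors."""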
def __init__(self, realfilename, realName, simfilename, simName, simgenfilename, MCBeta, MCK, zminSamp=0.1, zmaxSamp=1.20, zminFit = 0.1, zmaxFit = 1.20, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, Rate_Model = 'powerlaw', cheatType = False, cheatZ = False, cheatCCSub = False, cheatCCScale = False, cuts = None, nprint = 5, MURESCuts = None, noCCMC = False, priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, fixCCScale = False):
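        """Load the data catalog, the simulated catalog and the simulation
        generation catalog, then apply the configured cuts and store the fit
        settings on the instance."""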
print "Rate_Fitter"
print "np version {0}".format(np.__version__)
self.zminSamp = zminSamp
self.zmaxSamp = zmaxSamp
self.zminFit = zminFit
self.zmaxFit = zmaxFit
self.MCBeta = MCBeta
self.MCK = MCK
self.Rate_Model = Rate_Model
self.cheatType = cheatType
self.cheatZ = cheatZ
self.cheatCCSub = cheatCCSub
self.cheatCCScale = cheatCCScale
self.cuts = cuts
self.nprint = nprint
self.MURESCuts = MURESCuts
self.priorRate = priorRate
self.priorZEff = priorZEff
self.ratePriorErrUp = ratePriorErrUp
self.ratePriorErrDown = ratePriorErrDown
self.ratePriorErrAll = ratePriorErrAll
self.fixCCScale = fixCCScale
#print "PRIORS"
#print priorRate
#print priorZEff
#print ratePriorErrUp
#print ratePriorErrDown
if self.cheatZ:
self.ztype = 'SIM_ZCMB'
else:
#self.ztype = 'zHD'
self.ztype = 'zPHOT'
self.shiftFlagData = False
self.shiftFlagSim = False
self.globalChi2Storage = []
self.globalNDataStorage = []
'''
self.globalZPhotBinStorage = []
self.globalNDataIaPhotBinStorage = []
self.globalNDataCCPhotBinStorage = []
self.globalZTrueBinStorage = []
self.globalNDataIaTrueBinStorage = []
self.globalNDataCCTrueBinStorage = []
'''
print 'a'
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 5)
except:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
print 'b'
self.simName = simName
self.simgencat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
print 'c'
try:
#with np.load(simgenfilename+'.npz', allow_pickle = True) as data0:
# SIMGEN = data0['a']
SIMGEN = np.load(simgenfilename + '.npy', allow_pickle = True)
except:
SIMGEN = np.genfromtxt(simgenfilename, dtype=None, names = True, skip_footer=3, invalid_raise=False)
print "Compress save A"
SIMGEN.dtype.names = map(str, SIMGEN.dtype.names)
#np.savez_compressed(simgenfilename+'.npz', a = SIMGEN)
np.save(simgenfilename+'.npy', SIMGEN)
print "WHY DO YOU HATE ME WHEN I SHOW YOU NOTHING BUT LOVE"
print simgenfilename
#SIMGEN = pd.read_csv(simgenfilename, delim_whitespace=True, comment="#").to_records(index = False)
print 'd'
SIMGEN = SIMGEN[SIMGEN['GENZ']!= 'GENZ']
self.simgencat.params = {'flat':True, 'H0': simH0, 'Om0':simOmegaM, 'Ob0': simOb0,'sigma8': simSigma8, 'ns': simNs}
#self.simgencat.cosmo = Cosmology.setCosmology('simCosmo', self.simcat.params)
self.simgencat.OrigCatalog = np.copy(SIMGEN)
self.simgencat.Catalog = np.copy(SIMGEN)
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENZ']!= 'GENZ']
self.simgencat.simname = simName
self.simgencat.NSN = self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGEN NUMBER"
print self.simgencat.NSN
print "TEST2"
print self.simgencat.Catalog['GENZ'].shape[0]
print self.simgencat.Catalog['GENZ'].shape[1]
print self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGENCAT FILE"
print simfilename
self.realName = realName
try:
print 'q'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
except:
#self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
try:
print 'r'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
                print 's'
self.realcat = dataread.REAL_Cat(realfilename, realName, skip_header =11 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
print "Pre cut Catalog"
print self.realcat.Catalog.shape
for cut in cuts:
print 'a'
print cut
print self.realcat.Catalog.shape
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.simcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
print 'b'
print cut
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
self.postCutSimCat = np.copy(self.simcat.Catalog)
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.simcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print 'zCut Pre MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
'''
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
simCond = (self.simcat.Catalog[self.ztype] < mc[0]) | (self.simcat.Catalog[self.ztype] > mc[1])| ((self.simcat.Catalog['MURES'] > mc[2])& (self.simcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
self.simcat.Catalog = self.simcat.Catalog[simCond]
'''
self.realcat.Catalog = self.realcat.Catalog[ np.abs( self.realcat.Catalog['MURES'] * 1.0 / self.realcat.Catalog['MUERR'] ) < MURESCuts]
self.simcat.Catalog = self.simcat.Catalog[ np.abs( self.simcat.Catalog['MURES'] * 1.0 / self.simcat.Catalog['MUERR'] ) < MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
print 'zCut Post MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print "Post cut Catalog"
print self.realcat.Catalog.shape
if noCCMC:
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'] == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]
def newData(self, realfilename, realName, simInd =100):
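        """Swap in a new data catalog and re-apply the analysis, redshift and
        MURES cuts used at construction time."""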
self.realName = realName
self.shiftFlagData = False
try:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
if simInd < self.nprint:
print 'N precuts'
print self.realcat.Catalog['FITPROB'].shape
print "Pre cut Catalog"
print self.realcat.Catalog.shape
        for cut in self.cuts:
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print "Post cut Catalog"
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
'''
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
'''
            self.realcat.Catalog = self.realcat.Catalog[np.abs(self.realcat.Catalog['MURES']*1.0/self.realcat.Catalog['MUERR']) < self.MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
if simInd < self.nprint:
print "Minimum Fitprob"
print np.min(self.realcat.Catalog['FITPROB'])
print 'N postcuts'
print self.realcat.Catalog['FITPROB'].shape
def zSystematic(self, binList = None, nbins = None):
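        """Shift data zPHOT values by the per-bin mean photometric-redshift
        bias (zPHOT - SIM_ZCMB) measured in the simulation. Currently disabled
        by the assert below."""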
assert(0)
if nbins is None:
try:
self.nbins = len(binList) - 1
self.binList = binList
except:
self.nbins = binList.shape[0] - 1
self.binList = binList
else:
            binList = np.linspace(self.zminSamp, self.zmaxSamp, nbins+1)
self.nbins = nbins
self.binList = binList
if self.shiftFlagData:
print "DONT DOUBLE SHIFT"
return 0
if not self.shiftFlagSim:
oldsimz = self.simcat.Catalog['zPHOT']
oldsimtruez = self.simcat.Catalog['SIM_ZCMB']
stat, bins, binnum = stats.binned_statistic(oldsimz, oldsimz - oldsimtruez, bins = self.binList, statistic ='mean')
self.zBiasShifts = stat
newsimz = oldsimz - stat[binnum]
            assert(np.sum(np.abs(newsimz - oldsimz)) > 0)
            self.shiftFlagSim = True
oldz = self.realcat.Catalog['zPHOT']
_,_, binnum = stats.binned_statistic(oldz, oldz, bins = self.binList, statistic ='mean')
newz = oldz - self.zBiasShifts[binnum]
oldzshape = oldz.shape[0]
self.realcat.Catalog['zPHOT'].put(np.arange(0, oldz.shape[0]), newz)
assert(np.sum(np.abs(newz - oldz)) > 0)
assert((oldzshape - np.arange(0, oldz.shape[0]).shape[0])< 1)
self.simFlagData = True
def effCalc(self, fracContamCut = 0.0, nbinsSamp = None, nbinsFit = None, binListSamp = None, binListFit = None, simInd =100):
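        # Build the efficiency/migration matrix effmat[i][j]: the chance that
        # a SN generated in true-z bin j ends up detected and fit in photo-z
        # bin i, estimated as (counts per 2D cell) / (generated counts per
        # true-z bin). Diagnostic images of the matrix are saved as PNGs.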
#### Do we want SNIas or all SN for efficiency?
import matplotlib as mpl
if nbinsSamp is None:
try:
self.nbinsSamp = len(binListSamp) - 1
self.binListSamp = binListSamp
except:
self.nbinsSamp = binListSamp.shape[0] - 1
self.binListSamp = binListSamp
else:
binListSamp = np.linspace(self.zminSamp, self.zmaxSamp, nbinsSamp+1)
self.nbinsSamp = nbinsSamp
self.binListSamp = binListSamp
if nbinsFit is None:
try:
self.nbinsFit = len(binListFit) - 1
self.binListFit = binListFit
except:
self.nbinsFit = binListFit.shape[0] - 1
self.binListFit = binListFit
else:
binListFit = np.linspace(self.zminFit, self.zmaxFit, nbinsFit+1)
self.nbinsFit = nbinsFit
self.binListFit = binListFit
self.typeString = ''
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
'''
if (fracContamCut > 0.000000001) & (fracContamCut < 1.0):
print " Cutting based on Frac Contam"
histTot, binsX, binsY = np.histogram2d(self.simcat.Catalog[ztype], self.simcat.Catalog['MURES'], bins = nbins)
histCC, binsX, binsY = np.histogram2d(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int)!= 1][ztype], self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int)!= 1]['MURES'], bins = (binsX, binsY))
fracContam = histCC.astype(np.float)/histTot.astype(np.float)
for fcRow, i in zip(fracContam, xrange(binsX.shape[0])):
for fc, j in zip(fcRow, xrange(binsY.shape[0])):
if fc < fracContamCut:
continue
else:
simInBin = (self.simcat.Catalog[ztype] > binsX[i]) & (self.simcat.Catalog[ztype] < binsX[i+1]) & (self.simcat.Catalog['MURES'] > binsY[j]) & (self.simcat.Catalog['MURES'] < binsY[j+1])
realInBin = (self.realcat.Catalog[ztype] > binsX[i]) & (self.realcat.Catalog[ztype] < binsX[i+1]) & (self.realcat.Catalog['MURES'] > binsY[j]) & (self.realcat.Catalog['MURES'] < binsY[j+1])
self.simcat.Catalog = self.simcat.Catalog[np.invert(simInBin)]
self.realcat.Catalog = self.realcat.Catalog[np.invert(realInBin)]
'''
zPHOTs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1][self.ztype].astype(float)
zTRUEs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]['SIM_ZCMB'].astype(float)
self.typeString = self.typeString + 'A1'
print "Type Location A"
print "Choice A1"
print zPHOTs.shape
print zTRUEs.shape
        print self.binListFit
        print self.binListSamp
counts, zPhotEdges, zTrueEdges, binnumber = scipy.stats.binned_statistic_2d(zPHOTs, zTRUEs, zTRUEs, statistic = 'count', bins = (self.binListFit, self.binListSamp))
assert(zPhotEdges.shape[0] == (self.nbinsFit + 1))
print "Type Location B"
print "Choice B1"
self.typeString = self.typeString + 'B1'
zGenHist, zGenBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins = self.binListSamp)
#zSim1Hist, zSim1Bins = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) ==1]['SIM_ZCMB'].astype(float), bins = self.binListSamp)
print "counts of zTrue in each zPhot vs zTrue bin"
print counts.astype(int)
print "zGen Bins"
print zGenBins
print 'zGen Histogram'
print zGenHist
print "sum zGen events"
print np.sum(zGenHist)
print "sum zPhot events"
print np.sum(counts)
#print "DEBUG HERE"
#assert(0)
self.effmat = np.zeros((self.nbinsFit, self.nbinsSamp))
xMax = zPhotEdges.shape[0] - 2
yMax = zTrueEdges.shape[0] - 2
print zGenHist
print counts.astype(int)
'''
for zPhotLedge, zPhotRedge, row, i in zip(zPhotEdges[:-1], zPhotEdges[1:], counts, xrange(xMax + 1)):
zPhotCenter = (zPhotLedge + zPhotRedge)/2.0
for zTrueLedge, zTrueRedge, count, j in zip(zTrueEdges[:-1], zTrueEdges[1:], row, xrange(yMax + 1)):
zTrueCenter = (zTrueLedge + zTrueRedge)/2.0
inCell = (zPHOTs > zPhotLedge) & (zPHOTs < zPhotRedge) & (zTRUEs > zTrueLedge)& (zTRUEs < zTrueRedge)
zPhotCell = zPHOTs[inCell];zTrueCell = zTRUEs[inCell]
self.effmat[i][j] = count # np.sum(inCell)
#print "inCell"
#print np.sum(inCell)
#print "count"
#print count
#try:
# assert(np.abs(np.sum(inCell) - count) < 2)
#except:
# print "CHECK ABOVE"
for row, i in zip(self.effmat, xrange(self.effmat.shape[0])):
for j in xrange(row.shape[0]):
self.effmat[i][j] /= zGenHist[j]
'''
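        # counts has shape (nbinsFit, nbinsSamp) and zGenHist has shape
        # (nbinsSamp,), so this broadcast divides each true-z column by the
        # number of generated events in that bin -- the same normalization the
        # commented-out loop above performed element by element.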
self.effmat = counts/zGenHist
#if simInd < self.nprint:
print 'effmat'
print self.effmat
extent = [zPhotEdges[0], zPhotEdges[-1], zTrueEdges[0], zTrueEdges[-1]]
if (simInd == 0) or (not ('sim' in self.realName.lower())):
plt.figure()
plt.imshow(np.flipud(counts.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName +'redshiftDistro.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues', norm=mpl.colors.LogNorm())
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrixLog.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrix.png')
plt.clf()
plt.close()
def fit_rate(self, fixK = False, fixBeta = False, simInd =100, trueBeta = 0, CCScale = 1.0, CCScaleErr = None, TrueCCScale = 1.0, BetaInit = 0.0, kInit = 1.0, BetaErr = 1, kErr = 1, f_Js = None, CCZbins = None, scaleZBins = None, Blind = False):
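        # Fit the rate evolution of SNe Ia. For Rate_Model == 'powerlaw' the
        # rate is reweighted by k*(1+z)**Beta; 'discrete' instead fits one
        # free normalization f_J per true-z bin. Core-collapse contamination
        # is subtracted using the simulated CC fraction, optionally rescaled
        # by CCScale (or by TrueCCScale when cheating).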
#import iminuit as iM
#from iminuit import Minuit as M
#import numpy as np
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
plt.switch_backend('Agg')
if simInd < self.nprint:
print "Type Location C"
print "Choice C1"
if len(self.typeString) <= 4:
self.typeString = self.typeString + 'C1'
nSim, simBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins=self.binListSamp)
if simInd < self.nprint:
print "nSim1"
print nSim
print self.simgencat.Catalog.shape
print "FIGURE OUT WHY YOU MADE THIS ASSERT STATEMENT LATER"
#assert(0)
        nSim2, simBins2 = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1][self.ztype].astype(float), bins=self.binListFit)
nSim3, simBins3 = np.histogram(self.simcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
        NCC, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1][self.ztype].astype(float), bins=self.binListFit)
if self.fixCCScale:
print "Fix CC Scale at 1"
else:
if simInd < self.nprint:
print "nSim2"
print nSim2
print "nSim3"
print nSim3
print "nCC"
print NCC
OrigNCC = np.copy(NCC)
if self.cheatCCSub:
if self.cheatCCScale:
print "WARNING: Only cheating on CC Subtraction not scale"
print "Setting NCC to infinity to make sure that cheating correctly"
print "Diagnostics after this point may be nonsense"
print self.cheatCCSub
print "NCC BeforeFck"
print NCC
NCC = NCC*1E100
print "NCC AfterFck"
print NCC
elif self.cheatCCScale:
print "NCC Before1"
print NCC
print TrueCCScale
NCC = applyCCScale(NCC, TrueCCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After1"
print NCC
else:
print "NCC Before2"
print NCC
print CCScale
NCC = applyCCScale(NCC, CCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After2"
print NCC
#assert(0)
NIa, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1][self.ztype].astype(float), bins=self.binListFit)
'''
DebugNIaPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX']!= 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot = applyCCScale(DebugNCCPhot, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListFit)
DebugNIaTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX']!= 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue = applyCCScale(DebugNCCTrue, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListSamp)
uselessCtr = 0
for niap, nccp, niat, ncct, zp, zt in zip(DebugNIaPhot, DebugNCCPhot, DebugNIaTrue, DebugNCCTrue,(self.binListFit[1:] + self.binListFit[:-1])/2.0, (self.binListSamp[1:] + self.binListSamp[:-1])/2.0 ):
uselessCtr +=1
self.globalZTrueBinStorage.append(zt)
self.globalZPhotBinStorage.append(zp)
self.globalNDataIaPhotBinStorage.append(niap)
self.globalNDataCCPhotBinStorage.append(nccp)
self.globalNDataIaTrueBinStorage.append(niat)
self.globalNDataCCTrueBinStorage.append(ncct)
print "UselessCtr"
print uselessCtr
'''
try:
            TrueNCC, _ = np.histogram(self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'] != 1][self.ztype].astype(float), bins=self.binListFit)
if simInd < self.nprint:
print "True NCC Data"
print TrueNCC
except:
print "Using real data"
TrueNCC = 0.0
nData, dataBins = np.histogram(self.realcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
print "nData"
print nData
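        # Estimate the CC contamination of the data: FracBad is the simulated
        # CC/(CC+Ia) fraction per fit bin, and nCCData the implied number of
        # CC events among the nData observed events; with cheatCCSub the true
        # CC counts from the sim truth table are used instead.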
if not(self.cheatCCSub):
FracBad = NCC*1.0/(1.0*(NCC+NIa))
nCCData = nData*FracBad
else:
nCCData = TrueNCC*1.0
FracBad = TrueNCC*1.0/nData
if simInd < self.nprint:
print "PreScale NCC/nSim"
print OrigNCC*1.0/(OrigNCC+NIa)
print "PreScale Pred NCC Data"
print OrigNCC*1.0/(OrigNCC+NIa)*nData
print "PreScale Pred NCC Data if 2NCC"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData
print "TrueNCC"
print TrueNCC
        if type(TrueNCC) != int:
if simInd < self.nprint:
print "PreScale PredNCCData - TrueNCCData"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC
print "PreScale PredNCCData - TrueNCCData/ PredNCCData"
print (OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData)
else:
print "Using real data"
print "Mean of PreScale PredNCCData - TrueNCCData/ PredNCCData"
print np.nanmean((OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData))
print "PostScale NCC/nData"
print NCC*1.0/(NCC+NIa)
if simInd < self.nprint:
print "Fraction of CCs in each bin"
print FracBad
print 'NCC'
print NCC
print 'nSim2'
print nSim2
print "nData, dataBins, realcat shape pre contam correction"
print nData
print dataBins
print np.sum(self.realcat.Catalog[self.ztype].astype(float) > self.zmaxFit)
print np.sum(self.realcat.Catalog[self.ztype].astype(float) < self.zminFit)
print self.realcat.Catalog[self.ztype].shape
print "Ratio nData/nSim"
print 1.0*nData/(1.0*nSim3)
print "Ratio nSim2/nData"
print 1.0*nSim3/(1.0*nData)
print "FracBad"
print FracBad
print 'NCCData'
print nCCData
if simInd < self.nprint:
print "overall Contam"
print np.sum(NCC)*1.0/(np.sum(nSim3)*1.0)
def chi2func(nData, nSim, effmat, fnorm, zCentersSamp, zCentersFit, k = 1.0, Beta = 0.0, zBreak = 1.0, dump = False, complexdump = False, modelError = False, nIA = None, nCC = None, Rate_Model = 'powerlaw', zbins = None, simInd = 100, BetaPrior = (-3, 3), KPrior = (0.0, 50.0), priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, TrueNCCData = None, f_1 = 1.0, f_2 = 1.0, f_3 = 1.0, f_4 = 1.0, f_5 = 1.0, f_6 = 1.0, f_7 = 1.0, f_8 = 1.0, f_9 = 1.0, f_10 = 1.0, f_11 = 1.0):
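            # Binned chi^2 between the CC-subtracted data histogram and the
            # efficiency-folded, rate-reweighted simulation. f_Js holds the
            # per-true-bin rate rescaling (power law in z, broken power law,
            # or one free value per bin); fnorm scales the MC to the data.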
if simInd < self.nprint:
print "PRIORS2"
print priorRate
print priorZEff
print ratePriorErrUp
print ratePriorErrDown
Chi2Temp = 0.0
if Rate_Model == 'powerlaw':
f_Js = k*(1+zCentersSamp)**Beta
elif Rate_Model == 'discrete':
f_Js = np.array([f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11])
elif (Rate_Model == 'brokenpowerlaw') | (Rate_Model == 'brokenpowerlawVar'):
f_Js = []
#zCenters = (zbins[1:]+zbins[:-1])/2.0
temp = None
for zC in zCentersSamp:
if zC < zBreak:
f_Js.append(k*(1+zC)**Beta)
elif not(temp is None):
f_Js.append(temp)
else:
temp = f_Js[-1]
f_Js.append(temp)
f_Js = np.array(f_Js)
else:
assert(0)
if simInd < self.nprint:
if Rate_Model == 'discrete':
print "f_Js init"
print f_Js
else:
print "Beta init"
print Beta
print "k init"
print k
#chi2Mat = np.zeros((self.nbinsFit))
#adjNMC = np.zeros((self.nbinsFit))
if Rate_Model == 'discrete':
kprior = 0
betaprior = 0
else:
kprior = weakPrior(k, KPrior)
betaprior = weakPrior(Beta, BetaPrior)
if dump and (self.nprint > simInd):
print "kprior"
print kprior
print "betaprior"
print betaprior
if (nIA is None) or (nCC is None):
if dump:
print "No CC Cut"
fracCCData = np.zeros(nData.shape)
elif self.cheatCCSub:
fracCCData = TrueNCC*1.0/nData
else:
if Rate_Model == 'discrete':
if dump and (self.nprint > simInd):
print 'f_J adjusted CC Cut'
print Rate_Model
print nCC
print nIA
print np.array(f_Js)
fracCCData = (nCC*1.0)/((1.0*nCC + nIA*np.array(f_Js)))
print fracCCData
else:
if dump and (self.nprint > simInd):
print "Beta Adjusted CC Cut"
print Rate_Model
#BetaRatio = k*(1+zCenters)**(Beta)#/(1+zCenters)**MCBeta
BetaRatio = (1+zCentersFit)**(Beta)#/(1+zCenters)**MCBeta
if dump and (self.nprint > simInd):
print "Beta Ratio"
print BetaRatio
print "BadFracCCData"
print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
print "bad NCCData"
print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))*nData
fracCCData = (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
if dump and (self.nprint > simInd):
print 'abc'
print "fracCCData2"
print fracCCData
print "unscaled fracCCData"
print (1.0*nCC)/(1.0*(nCC+nIA))
if self.cheatCCSub:
nCCData = TrueNCCData
                if dump and (self.nprint > simInd):
print "Cheating CC Sub"
assert(not(TrueNCCData is None))
elif dump and (self.nprint > simInd):
print 'def'
print "Normal CC Sub"
if not self.cheatCCSub:
nCCData = nData*fracCCData
if dump and (self.nprint > simInd):
print "nCCData2"
print nCCData
if not(TrueNCCData is None):
print "TrueNCCData"
print TrueNCCData
#print f_Js
#Check if I am scaling errors down with increasing MC size. Make MC twice as large as "Data" to test.
if dump: chi2Storage = []
if dump: scaledNSimStor = []
if dump: JSumTempNumStor = []
if dump: JSumTempDenStor = []
if dump:
print "actually used NCC"
#print nCC
print nCCData
if dump and (simInd < self.nprint):
print "effmat"
print effmat
print "nData"
print nData
print "nCCData"
print nCCData
print "nSim"
print nSim
print nCCData
for row, nDataI, nCCDataI, i, zc in zip(effmat, nData, nCCData, range(self.nbinsFit), zCentersFit):
if dump and (self.nprint > simInd):
print 'effmat row'
print row
print 'nDataI'
print nDataI
print 'nCCDataI'
print nCCDataI
scaledNSimTemp = 0.0
JSumTempNum = 0.0
JSumTempDen = 0.0
if dump and (simInd < self.nprint):
print "nBinsSamp"
print self.nbinsSamp
assert(row.shape[0] == self.nbinsSamp)
assert(nSim.shape[0] == self.nbinsSamp)
assert(len(f_Js) == self.nbinsSamp)
for eff, nSimJ, f_J, j in zip(row, nSim, f_Js, range(self.nbinsSamp)):
if dump and (self.nprint > simInd):
print 'NGen J'
print nSimJ
print 'JSumTempNum contr'
print nSimJ*f_J*eff*fnorm
print 'JSumTempDen contr'
print nSimJ*f_J*eff*fnorm*f_J*fnorm
#if dump and (i!= j) and self.cheatZ and (self.nprint < simInd):
# if nSimJ*f_J*eff*fnorm > 0:
# print " This should be zero but isnt "
# print nSimJ*f_J*eff*fnorm
# assert(0)
JSumTempNum += nSimJ*f_J*eff*fnorm
JSumTempDen += nSimJ*f_J*eff*fnorm*f_J*fnorm
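                # Chi^2 contribution of fit bin i: the numerator compares the
                # data, minus the CC estimate, to the reweighted MC prediction
                # JSumTempNum; the denominator approximates the variance with
                # Poisson terms for data and CC (each floored at 1) plus the
                # MC shot-noise term accumulated in JSumTempDen.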
dataFunc = np.maximum(nDataI,1)
#CCFunc = np.ceil(np.maximum(nCCDataI, 1))
CCFunc = np.maximum(nCCDataI, 1)
c2t = (nDataI - nCCDataI - JSumTempNum)**2/( dataFunc + CCFunc + JSumTempDen)
if dump:
JSumTempNumStor.append(JSumTempNum)
JSumTempDenStor.append(JSumTempDen)
if dump and (self.nprint > simInd):
print i
print 'nDataI'
print nDataI
print 'fnCCDataI'
print nCCDataI
print 'fnorm'
print fnorm
print "JSumTempNum tot"
print JSumTempNum
print "JSumTempDen tot"
print JSumTempDen
print "Chi2Bin"
print c2t
if dump:
chi2Storage.append(c2t)
if c2t > 5:
print 'INSANITY CHECK ABOVE'
# Chi2Temp += ((nDataI - nCCDataI - JSumTempNum)**2/(JSumTempNum + JSumTempDen))#*fnorm**2
if n
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import namedtuple
from functools import partial
import inspect
from itertools import product
import math
import os
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest
import scipy
from scipy.sparse import csr_matrix
import scipy.stats as osp
import jax
from jax import grad, lax, vmap
import jax.numpy as jnp
import jax.random as random
from jax.scipy.special import expit, logsumexp
from jax.scipy.stats import norm as jax_norm, truncnorm as jax_truncnorm
import numpyro.distributions as dist
from numpyro.distributions import (
SineBivariateVonMises,
constraints,
kl_divergence,
transforms,
)
from numpyro.distributions.batch_util import vmap_over
from numpyro.distributions.discrete import _to_probs_bernoulli, _to_probs_multinom
from numpyro.distributions.flows import InverseAutoregressiveTransform
from numpyro.distributions.gof import InvalidTest, auto_goodness_of_fit
from numpyro.distributions.transforms import (
LowerCholeskyAffine,
PermuteTransform,
PowerTransform,
SimplexToOrderedTransform,
SoftplusTransform,
biject_to,
)
from numpyro.distributions.util import (
matrix_to_tril_vec,
multinomial,
signed_stick_breaking_tril,
sum_rightmost,
vec_to_tril_matrix,
)
from numpyro.nn import AutoregressiveNN
TEST_FAILURE_RATE = 2e-5 # For all goodness-of-fit tests.
def my_kron(A, B):
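    # Kronecker product over the two trailing matrix dimensions, keeping any
    # leading batch dimensions intact (np.kron alone would not broadcast them).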
D = A[..., :, None, :, None] * B[..., None, :, None, :]
ds = D.shape
newshape = (*ds[:-4], ds[-4] * ds[-3], ds[-2] * ds[-1])
return D.reshape(newshape)
def _identity(x):
return x
def _circ_mean(angles):
return jnp.arctan2(
jnp.mean(jnp.sin(angles), axis=0), jnp.mean(jnp.cos(angles), axis=0)
)
def sde_fn1(x, _):
lam = 0.1
sigma2 = 0.1
return lam * x, sigma2
def sde_fn2(xy, _):
tau, a = 2.0, 1.1
x, y = xy[0], xy[1]
dx = tau * (x - x**3.0 / 3.0 + y)
dy = (1.0 / tau) * (a - x)
dxy = jnp.vstack([dx, dy]).reshape(xy.shape)
sigma2 = 0.1
return dxy, sigma2
class T(namedtuple("TestCase", ["jax_dist", "sp_dist", "params"])):
def __new__(cls, jax_dist, *params):
sp_dist = get_sp_dist(jax_dist)
return super(cls, T).__new__(cls, jax_dist, sp_dist, params)
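# Each test case bundles a distribution class with example parameters; the
# matching scipy frozen-distribution factory (when one is registered in
# _DIST_MAP below) is looked up automatically, e.g. T(dist.Normal, 0.0, 1.0).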
def _mvn_to_scipy(loc, cov, prec, tril):
jax_dist = dist.MultivariateNormal(loc, cov, prec, tril)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_normal(mean=mean, cov=cov)
def _multivariate_t_to_scipy(df, loc, tril):
    if tuple(int(p) for p in scipy.__version__.split(".")[:2]) < (1, 6):
pytest.skip(
"Multivariate Student-T distribution is not available in scipy < 1.6"
)
jax_dist = dist.MultivariateStudentT(df, loc, tril)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_t(loc=mean, shape=cov, df=df)
def _lowrank_mvn_to_scipy(loc, cov_fac, cov_diag):
jax_dist = dist.LowRankMultivariateNormal(loc, cov_fac, cov_diag)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_normal(mean=mean, cov=cov)
def _truncnorm_to_scipy(loc, scale, low, high):
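    # scipy.stats.truncnorm expects its bounds a, b in standardized units,
    # i.e. (bound - loc) / scale, with +/-inf for one-sided truncation.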
if low is None:
a = -np.inf
else:
a = (low - loc) / scale
if high is None:
b = np.inf
else:
b = (high - loc) / scale
return osp.truncnorm(a, b, loc=loc, scale=scale)
def _TruncatedNormal(loc, scale, low, high):
return dist.TruncatedNormal(loc=loc, scale=scale, low=low, high=high)
def _TruncatedCauchy(loc, scale, low, high):
return dist.TruncatedCauchy(loc=loc, scale=scale, low=low, high=high)
_TruncatedNormal.arg_constraints = {}
_TruncatedNormal.reparametrized_params = []
_TruncatedNormal.infer_shapes = lambda *args: (lax.broadcast_shapes(*args), ())
class SineSkewedUniform(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
lower, upper = (np.array([-math.pi, -math.pi]), np.array([math.pi, math.pi]))
base_dist = dist.Uniform(lower, upper, **kwargs).to_event(lower.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_uniform(self: SineSkewedUniform, skewness=None):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class SineSkewedVonMises(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
von_loc, von_conc = (np.array([0.0]), np.array([1.0]))
base_dist = dist.VonMises(von_loc, von_conc, **kwargs).to_event(von_loc.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_von_mises(self: SineSkewedVonMises, skewness=None):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class SineSkewedVonMisesBatched(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
von_loc, von_conc = (np.array([0.0, -1.234]), np.array([1.0, 10.0]))
base_dist = dist.VonMises(von_loc, von_conc, **kwargs).to_event(von_loc.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_von_mises_batched(
self: SineSkewedVonMisesBatched, skewness=None
):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class _GaussianMixture(dist.MixtureSameFamily):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, loc, scale):
component_dist = dist.Normal(loc=loc, scale=scale)
mixing_distribution = dist.Categorical(probs=mixing_probs)
super().__init__(
mixing_distribution=mixing_distribution,
component_distribution=component_dist,
)
@property
def loc(self):
return self.component_distribution.loc
@property
def scale(self):
return self.component_distribution.scale
@vmap_over.register
def _vmap_over_gaussian_mixture(self: _GaussianMixture, loc=None, scale=None):
component_distribution = vmap_over(
self.component_distribution, loc=loc, scale=scale
)
return vmap_over.dispatch(dist.MixtureSameFamily)(
self, _component_distribution=component_distribution
)
class _Gaussian2DMixture(dist.MixtureSameFamily):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, loc, covariance_matrix):
component_dist = dist.MultivariateNormal(
loc=loc, covariance_matrix=covariance_matrix
)
mixing_distribution = dist.Categorical(probs=mixing_probs)
super().__init__(
mixing_distribution=mixing_distribution,
component_distribution=component_dist,
)
@property
def loc(self):
return self.component_distribution.loc
@property
def covariance_matrix(self):
return self.component_distribution.covariance_matrix
@vmap_over.register
def _vmap_over_gaussian_2d_mixture(self: _Gaussian2DMixture, loc=None):
component_distribution = vmap_over(self.component_distribution, loc=loc)
return vmap_over.dispatch(dist.MixtureSameFamily)(
self, _component_distribution=component_distribution
)
class _GeneralMixture(dist.MixtureGeneral):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, locs, scales):
component_dists = [
dist.Normal(loc=loc_, scale=scale_) for loc_, scale_ in zip(locs, scales)
]
mixing_distribution = dist.Categorical(probs=mixing_probs)
        super().__init__(
mixing_distribution=mixing_distribution,
component_distributions=component_dists,
)
@property
def locs(self):
# hotfix for vmapping tests, which cannot easily check non-array attributes
return self.component_distributions[0].loc
@property
def scales(self):
return self.component_distributions[0].scale
@vmap_over.register
def _vmap_over_general_mixture(self: _GeneralMixture, locs=None, scales=None):
component_distributions = [
vmap_over(d, loc=locs, scale=scales) for d in self.component_distributions
]
return vmap_over.dispatch(dist.MixtureGeneral)(
self, _component_distributions=component_distributions
)
class _General2DMixture(dist.MixtureGeneral):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, locs, covariance_matrices):
component_dists = [
dist.MultivariateNormal(loc=loc_, covariance_matrix=covariance_matrix)
for loc_, covariance_matrix in zip(locs, covariance_matrices)
]
mixing_distribution = dist.Categorical(probs=mixing_probs)
        super().__init__(
mixing_distribution=mixing_distribution,
component_distributions=component_dists,
)
@property
def locs(self):
# hotfix for vmapping tests, which cannot easily check non-array attributes
return self.component_distributions[0].loc
@property
def covariance_matrices(self):
return self.component_distributions[0].covariance_matrix
@vmap_over.register
def _vmap_over_general_2d_mixture(self: _General2DMixture, locs=None):
component_distributions = [
vmap_over(d, loc=locs) for d in self.component_distributions
]
return vmap_over.dispatch(dist.MixtureGeneral)(
self, _component_distributions=component_distributions
)
class _ImproperWrapper(dist.ImproperUniform):
def sample(self, key, sample_shape=()):
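        # ImproperUniform defines no sampler; for the tests, draw uniformly in
        # the unconstrained space and push the draws through the constraining
        # bijection so every sample lands inside the support.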
transform = biject_to(self.support)
prototype_value = jnp.zeros(self.event_shape)
unconstrained_event_shape = jnp.shape(transform.inv(prototype_value))
shape = sample_shape + self.batch_shape + unconstrained_event_shape
unconstrained_samples = random.uniform(key, shape, minval=-2, maxval=2)
return transform(unconstrained_samples)
class ZeroInflatedPoissonLogits(dist.discrete.ZeroInflatedLogits):
arg_constraints = {"rate": constraints.positive, "gate_logits": constraints.real}
pytree_data_fields = ("rate",)
def __init__(self, rate, gate_logits, *, validate_args=None):
self.rate = rate
super().__init__(dist.Poisson(rate), gate_logits, validate_args=validate_args)
@vmap_over.register
def _vmap_over_zero_inflated_poisson_logits(
self: ZeroInflatedPoissonLogits, rate=None, gate_logits=None
):
dist_axes = vmap_over.dispatch(dist.discrete.ZeroInflatedLogits)(
self,
base_dist=vmap_over(self.base_dist, rate=rate),
gate_logits=gate_logits,
gate=gate_logits,
)
dist_axes.rate = rate
return dist_axes
class SparsePoisson(dist.Poisson):
def __init__(self, rate, *, validate_args=None):
super().__init__(rate, is_sparse=True, validate_args=validate_args)
class FoldedNormal(dist.FoldedDistribution):
arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
def __init__(self, loc, scale, validate_args=None):
self.loc = loc
self.scale = scale
super().__init__(dist.Normal(loc, scale), validate_args=validate_args)
@vmap_over.register
def _vmap_over_folded_normal(self: "FoldedNormal", loc=None, scale=None):
d = vmap_over.dispatch(dist.FoldedDistribution)(
self, base_dist=vmap_over(self.base_dist, loc=loc, scale=scale)
)
d.loc = loc
d.scale = scale
return d
class _SparseCAR(dist.CAR):
reparametrized_params = ["loc", "correlation", "conditional_precision"]
def __init__(
self,
loc,
correlation,
conditional_precision,
adj_matrix,
*,
is_sparse=True,
validate_args=None,
):
super().__init__(
loc,
correlation,
conditional_precision,
adj_matrix,
is_sparse=True,
validate_args=validate_args,
)
_DIST_MAP = {
dist.AsymmetricLaplace: lambda loc, scale, asymmetry: osp.laplace_asymmetric(
asymmetry, loc=loc, scale=scale
),
dist.BernoulliProbs: lambda probs: osp.bernoulli(p=probs),
dist.BernoulliLogits: lambda logits: osp.bernoulli(p=_to_probs_bernoulli(logits)),
dist.Beta: lambda con1, con0: osp.beta(con1, con0),
dist.BetaProportion: lambda mu, kappa: osp.beta(mu * kappa, (1 - mu) * kappa),
dist.BinomialProbs: lambda probs, total_count: osp.binom(n=total_count, p=probs),
dist.BinomialLogits: lambda logits, total_count: osp.binom(
n=total_count, p=_to_probs_bernoulli(logits)
),
dist.Cauchy: lambda loc, scale: osp.cauchy(loc=loc, scale=scale),
dist.Chi2: lambda df: osp.chi2(df),
dist.Dirichlet: lambda conc: osp.dirichlet(conc),
dist.Exponential: lambda rate: osp.expon(scale=jnp.reciprocal(rate)),
dist.Gamma: lambda conc, rate: osp.gamma(conc, scale=1.0 / rate),
dist.GeometricProbs: lambda probs: osp.geom(p=probs, loc=-1),
dist.GeometricLogits: lambda logits: osp.geom(
p=_to_probs_bernoulli(logits), loc=-1
),
dist.Gumbel: lambda loc, scale: osp.gumbel_r(loc=loc, scale=scale),
dist.HalfCauchy: lambda scale: osp.halfcauchy(scale=scale),
dist.HalfNormal: lambda scale: osp.halfnorm(scale=scale),
dist.InverseGamma: lambda conc, rate: osp.invgamma(conc, scale=rate),
dist.Laplace: lambda loc, scale: osp.laplace(loc=loc, scale=scale),
dist.LogNormal: lambda loc, scale: osp.lognorm(s=scale, scale=jnp.exp(loc)),
dist.LogUniform: lambda a, b: osp.loguniform(a, b),
dist.MultinomialProbs: lambda probs, total_count: osp.multinomial(
n=total_count, p=probs
),
dist.MultinomialLogits: lambda logits, total_count: osp.multinomial(
n=total_count, p=_to_probs_multinom(logits)
),
dist.MultivariateNormal: _mvn_to_scipy,
dist.MultivariateStudentT: _multivariate_t_to_scipy,
dist.LowRankMultivariateNormal: _lowrank_mvn_to_scipy,
dist.Normal: lambda loc, scale: osp.norm(loc=loc, scale=scale),
dist.Pareto: lambda scale, alpha: osp.pareto(alpha, scale=scale),
dist.Poisson: lambda rate: osp.poisson(rate),
dist.StudentT: lambda df, loc, scale: osp.t(df=df, loc=loc, scale=scale),
dist.Uniform: lambda a, b: osp.uniform(a, b - a),
dist.Logistic: lambda loc, scale: osp.logistic(loc=loc, scale=scale),
dist.VonMises: lambda loc, conc: osp.vonmises(
loc=np.array(loc, dtype=np.float64), kappa=np.array(conc, dtype=np.float64)
),
dist.Weibull: lambda scale, conc: osp.weibull_min(
c=conc,
scale=scale,
),
_TruncatedNormal: _truncnorm_to_scipy,
}
def get_sp_dist(jax_dist):
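    # Walk the MRO so subclasses (e.g. the wrapper classes above) inherit the
    # scipy equivalent registered for their base distribution.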
classes = jax_dist.mro() if isinstance(jax_dist, type) else [jax_dist]
for cls in classes:
if cls in _DIST_MAP:
return _DIST_MAP[cls]
CONTINUOUS = [
T(dist.AsymmetricLaplace, 1.0, 0.5, 1.0),
T(dist.AsymmetricLaplace, np.array([1.0, 2.0]), 2.0, 2.0),
T(dist.AsymmetricLaplace, np.array([[1.0], [2.0]]), 2.0, np.array([3.0, 5.0])),
T(dist.AsymmetricLaplaceQuantile, 0.0, 1.0, 0.5),
T(dist.AsymmetricLaplaceQuantile, np.array([1.0, 2.0]), 2.0, 0.7),
T(
dist.AsymmetricLaplaceQuantile,
np.array([[1.0], [2.0]]),
2.0,
np.array([0.2, 0.8]),
),
T(dist.Beta, 0.2, 1.1),
T(dist.Beta, 1.0, np.array([2.0, 2.0])),
T(dist.Beta, 1.0, np.array([[1.0, 1.0], [2.0, 2.0]])),
T(dist.BetaProportion, 0.2, 10.0),
T(dist.BetaProportion, 0.51, np.array([2.0, 1.0])),
T(dist.BetaProportion, 0.5, np.array([[4.0, 4.0], [2.0, 2.0]])),
T(dist.Chi2, 2.0),
T(dist.Chi2, np.array([0.3, 1.3])),
T(dist.Cauchy, 0.0, 1.0),
T(dist.Cauchy, 0.0, np.array([1.0, 2.0])),
T(dist.Cauchy, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.Dirichlet, np.array([1.7])),
T(dist.Dirichlet, np.array([0.2, 1.1])),
T(dist.Dirichlet, np.array([[0.2, 1.1], [2.0, 2.0]])),
T(
dist.EulerMaruyama,
np.array([0.0, 0.1, 0.2]),
sde_fn1,
dist.Normal(0.1, 1.0),
),
T(
dist.EulerMaruyama,
np.array([0.0, 0.1, 0.2]),
sde_fn2,
dist.Normal(jnp.array([0.0, 1.0]), 1e-3).to_event(1),
),
T(
dist.EulerMaruyama,
np.array([[0.0, 0.1, 0.2], [10.0, 10.1, 10.2]]),
sde_fn2,
dist.Normal(jnp.array([0.0, 1.0]), 1e-3).to_event(1),
),
T(
dist.EulerMaruyama,
np.array([[0.0, 0.1, 0.2], [10.0, 10.1, 10.2]]),
sde_fn2,
dist.Normal(jnp.array([[0.0, 1.0], [2.0, 3.0]]), 1e-2).to_event(1),
),
T(dist.Exponential, 2.0),
T(dist.Exponential, np.array([4.0, 2.0])),
T(dist.Gamma, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Gamma, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.GaussianRandomWalk, 0.1, 10),
T(dist.GaussianRandomWalk, np.array([0.1, 0.3, 0.25]), 10),
T(
dist.GaussianCopulaBeta,
np.array([7.0, 2.0]),
np.array([4.0, 10.0]),
np.array([[1.0, 0.75], [0.75, 1.0]]),
),
T(dist.GaussianCopulaBeta, 2.0, 1.5, np.eye(3)),
T(dist.GaussianCopulaBeta, 2.0, 1.5, np.full((5, 3, 3), np.eye(3))),
T(dist.Gompertz, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Gompertz, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.Gumbel, 0.0, 1.0),
T(dist.Gumbel, 0.5, 2.0),
T(dist.Gumbel, np.array([0.0, 0.5]), np.array([1.0, 2.0])),
T(FoldedNormal, 2.0, 4.0),
T(FoldedNormal, np.array([2.0, 50.0]), np.array([4.0, 100.0])),
T(dist.HalfCauchy, 1.0),
T(dist.HalfCauchy, np.array([1.0, 2.0])),
T(dist.HalfNormal, 1.0),
T(dist.HalfNormal, np.array([1.0, 2.0])),
T(_ImproperWrapper, constraints.positive, (), (3,)),
T(dist.InverseGamma, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.InverseGamma, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.Kumaraswamy, 10.0, np.array([2.0, 3.0])),
T(dist.Kumaraswamy, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Kumaraswamy, 0.6, 0.5),
T(dist.Laplace, 0.0, 1.0),
T(dist.Laplace, 0.5, np.array([1.0, 2.5])),
T(dist.Laplace, np.array([1.0, -0.5]), np.array([2.3, 3.0])),
T(dist.LKJ, 2, 0.5, "onion"),
T(dist.LKJ, 5, np.array([0.5, 1.0, 2.0]), "cvine"),
T(dist.LKJCholesky, 2, 0.5, "onion"),
T(dist.LKJCholesky, 2, 0.5, "cvine"),
T(dist.LKJCholesky, 5, np.array([0.5, 1.0, 2.0]), "onion"),
pytest.param(
*T(dist.LKJCholesky, 5, np.array([0.5, 1.0, 2.0]), "cvine"),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
pytest.param(
*T(dist.LKJCholesky, 3, np.array([[3.0, 0.6], [0.2, 5.0]]), "onion"),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(dist.LKJCholesky, 3, np.array([[3.0, 0.6], [0.2, 5.0]]), "cvine"),
T(dist.Logistic, 0.0, 1.0),
T(dist.Logistic, 1.0, np.array([1.0, 2.0])),
T(dist.Logistic, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.LogNormal, 1.0, 0.2),
T(dist.LogNormal, -1.0, np.array([0.5, 1.3])),
T(dist.LogNormal, np.array([0.5, -0.7]), np.array([[0.1, 0.4], [0.5, 0.1]])),
T(dist.LogUniform, 1.0, 2.0),
T(dist.LogUniform, 1.0, np.array([2.0, 3.0])),
T(dist.LogUniform, np.array([1.0, 2.0]), np.array([[3.0], [4.0]])),
T(
dist.MatrixNormal,
1.0 * np.arange(6).reshape(3, 2),
np.array([[1.0, 0, 0], [0.3, 0.36, 0], [0.4, 0.49, 4]]),
np.array([[1.0, 0], [0.4, 1]]),
),
T(
dist.MatrixNormal,
1.0 * np.arange(12).reshape((2, 3, 2)),
np.array([[1.0, 0, 0], [0.3, 0.36, 0], [0.4, 0.49, 4]]) * np.ones((2, 3, 3)),
np.array([[1.0, 0], [0.4, 0.5]]) * np.ones((2, 2, 2)),
),
T(
dist.MatrixNormal,
1.0 * np.arange(36).reshape((2, 3, 3, 2)),
np.identity(3),
np.identity(2),
),
T(dist.MultivariateNormal, 0.0, np.array([[1.0, 0.5], [0.5, 1.0]]), None, None),
T(
dist.MultivariateNormal,
np.array([1.0, 3.0]),
None,
np.array([[1.0, 0.5], [0.5, 1.0]]),
None,
),
T(
dist.MultivariateNormal,
np.array([1.0, 3.0]),
None,
np.array([[[1.0, 0.5], [0.5, 1.0]]]),
None,
),
T(
dist.MultivariateNormal,
np.array([2.0]),
None,
None,
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateNormal,
np.arange(6, dtype=np.float32).reshape((3, 2)),
None,
None,
np.array([[1.0, 0.0], [0.0, 1.0]]),
),
T(
dist.MultivariateNormal,
0.0,
None,
np.broadcast_to(np.identity(3), (2, 3, 3)),
None,
),
T(
dist.CAR,
1.2,
np.array([-0.2, 0.3]),
0.1,
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
dist.CAR,
np.array([0.0, 1.0, 3.0, 4.0]),
0.1,
np.array([0.3, 0.7]),
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
_SparseCAR,
np.array([[0.0, 1.0, 3.0, 4.0], [2.0, -1.0, -3.0, 2.0]]),
0.0,
0.1,
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
dist.MultivariateStudentT,
15.0,
0.0,
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([1.0, 3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([1.0, 3.0]),
np.array([[[1.0, 0.0], [0.5, 1.0]]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.arange(6, dtype=np.float32).reshape((3, 2)),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.ones(3),
np.broadcast_to(np.identity(3), (2, 3, 3)),
),
T(
dist.MultivariateStudentT,
np.array(7.0),
np.array([1.0, 3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
np.arange(20, 22, dtype=jnp.float32),
np.ones(3),
np.broadcast_to(jnp.identity(3), (2, 3, 3)),
),
T(
dist.MultivariateStudentT,
np.arange(20, 26, dtype=jnp.float32).reshape((3, 2)),
np.ones(2),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.LowRankMultivariateNormal,
np.zeros(2),
np.array([[1.0], [0.0]]),
np.array([1.0, 1.0]),
),
T(
dist.LowRankMultivariateNormal,
np.arange(6, dtype=jnp.float32).reshape((2, 3)),
np.arange(6, dtype=jnp.float32).reshape((3, 2)),
np.array([1.0, 2.0, 3.0]),
),
T(dist.Normal, 0.0, 1.0),
T(dist.Normal, 1.0, np.array([1.0, 2.0])),
T(dist.Normal, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.Pareto, 1.0, 2.0),
T(dist.Pareto, np.array([1.0, 0.5]), np.array([0.3, 2.0])),
T(dist.Pareto, np.array([[1.0], [3.0]]), np.array([1.0, 0.5])),
T(dist.RelaxedBernoulliLogits, 2.0, -10.0),
T(dist.RelaxedBernoulliLogits, np.array([1.0, 3.0]), np.array([3.0, 8.0])),
T(dist.SoftLaplace, 1.0, 1.0),
T(dist.SoftLaplace, np.array([-1.0, 50.0]), np.array([4.0, 100.0])),
T(dist.StudentT, 1.0, 1.0, 0.5),
T(dist.StudentT, 2.0, np.array([1.0, 2.0]), 2.0),
T(dist.StudentT, np.array([3.0, 5.0]), np.array([[1.0], [2.0]]), 2.0),
T(_TruncatedCauchy, 0.0, 1.0, -1.0, None),
T(_TruncatedCauchy, 0.0, np.array([1.0, 2.0]), 1.0, None),
T(
_TruncatedCauchy,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
np.array([-2.0, 2.0]),
None,
),
T(_TruncatedCauchy, 0.0, 1.0, None, 1.0),
T(_TruncatedCauchy, 0.0, 1.0, -1.0, 1.0),
T(_TruncatedNormal, 0.0, 1.0, -1.0, None),
T(_TruncatedNormal, -1.0, np.array([1.0, 2.0]), 1.0, None),
T(
_TruncatedNormal,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
np.array([-2.0, 2.0]),
None,
),
T(_TruncatedNormal, -1.0, 2.0, 1.0, 5.0),
T(_TruncatedNormal, np.array([-1.0, 4.0]), 2.0, None, 5.0),
T(_TruncatedNormal, -1.0, np.array([2.0, 3.0]), 1.0, None),
T(_TruncatedNormal, -1.0, 2.0, np.array([-6.0, 4.0]), np.array([-4.0, 6.0])),
T(
_TruncatedNormal,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
None,
np.array([-2.0, 2.0]),
),
T(dist.TwoSidedTruncatedDistribution, dist.Laplace(0.0, 1.0), -2.0, 3.0),
T(dist.Uniform, 0.0, 2.0),
T(dist.Uniform, 1.0, np.array([2.0, 3.0])),
T(dist.Uniform, np.array([0.0, 0.0]), np.array([[2.0], [3.0]])),
T(dist.Weibull, 0.2, 1.1),
T(dist.Weibull, 2.8, np.array([2.0, 2.0])),
T(dist.Weibull, 1.8, np.array([[1.0, 1.0], [2.0, 2.0]])),
T(
_GaussianMixture,
np.ones(3) / 3.0,
np.array([0.0, 7.7, 2.1]),
np.array([4.2, 7.7, 2.1]),
),
T(
_Gaussian2DMixture,
np.array([0.2, 0.5, 0.3]),
np.array([[-1.2, 1.5], [2.0, 2.0], [-1, 4.0]]), # Mean
np.array(
[
[
[0.1, -0.2],
[-0.2, 1.0],
],
[
[0.75, 0.0],
[0.0, 0.75],
],
[
[1.0, 0.5],
[0.5, 0.27],
],
]
), # Covariance
),
T(
_GeneralMixture,
np.array([0.2, 0.3, 0.5]),
np.array([0.0, 7.7, 2.1]),
np.array([4.2, 1.7, 2.1]),
),
T(
_General2DMixture,
np.array([0.2, 0.5, 0.3]),
np.array([[-1.2, 1.5], [2.0, 2.0], [-1, 4.0]]), # Mean
np.array(
[
[
[0.1, -0.2],
[-0.2, 1.0],
],
[
[0.75, 0.0],
[0.0, 0.75],
],
[
[1.0, 0.5],
[0.5, 0.27],
],
]
), # Covariance
),
]
DIRECTIONAL = [
T(dist.VonMises, 2.0, 10.0),
T(dist.VonMises, 2.0, np.array([150.0, 10.0])),
T(dist.VonMises, np.array([1 / 3 * np.pi, -1.0]), np.array([20.0, 30.0])),
pytest.param(
*T(
dist.SineBivariateVonMises,
0.0,
0.0,
5.0,
6.0,
2.0,
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(
dist.SineBivariateVonMises,
3.003,
-1.343,
5.0,
6.0,
2.0,
),
pytest.param(
*T(
dist.SineBivariateVonMises,
-1.232,
-1.3430,
3.4,
2.0,
1.0,
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
pytest.param(
*T(
dist.SineBivariateVonMises,
np.array([math.pi - 0.2, 1.0]),
np.array([0.0, 1.0]),
np.array([5.0, 5.0]),
np.array([7.0, 0.5]),
None,
np.array([0.5, 0.1]),
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(dist.ProjectedNormal, np.array([0.0, 0.0])),
T(dist.ProjectedNormal, np.array([[2.0, 3.0]])),
T(dist.ProjectedNormal, np.array([0.0, 0.0, 0.0])),
T(dist.ProjectedNormal, np.array([[-1.0, 2.0, 3.0]])),
T(SineSkewedUniform, np.array([-math.pi / 4, 0.1])),
T(SineSkewedVonMises, np.array([0.342355])),
T(SineSkewedVonMisesBatched, np.array([[0.342355, -0.0001], [0.91, 0.09]])),
]
DISCRETE = [
T(dist.BetaBinomial, 2.0, 5.0, 10),
T(
dist.BetaBinomial,
np.array([2.0, 4.0]),
np.array([5.0, 3.0]),
np.array([10, 12]),
),
T(dist.BernoulliProbs, 0.2),
T(dist.BernoulliProbs, np.array([0.2, 0.7])),
T(dist.BernoulliLogits, np.array([-1.0, 3.0])),
T(dist.BinomialProbs, np.array([0.2, 0.7]), np.array([10, 2])),
T(dist.BinomialProbs, np.array([0.2, 0.7]), np.array([5, 8])),
T(dist.BinomialLogits, np.array([-1.0, 3.0]), np.array([5, 8])),
T(dist.CategoricalProbs, np.array([1.0])),
T(dist.CategoricalProbs, np.array([0.1, 0.5, 0.4])),
T(dist.CategoricalProbs, np.array([[0.1, 0.5, 0.4], [0.4, 0.4, 0.2]])),
T(dist.CategoricalLogits, np.array([-5.0])),
T(dist.CategoricalLogits, np.array([1.0, 2.0, -2.0])),
T(dist.CategoricalLogits, np.array([[-1, 2.0, 3.0], [3.0, -4.0, -2.0]])),
T(dist.Delta, 1),
T(dist.Delta, np.array([0.0, 2.0])),
T(dist.Delta, np.array([0.0, 2.0]), np.array([-2.0, -4.0])),
T(dist.Dirichlet
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: from __future__ import division, print_function, absolute_import
2:
3: import os
4: from os.path import join
5:
6: from scipy._build_utils import numpy_nodepr_api
7:
8:
9: def configuration(parent_package='',top_path=None):
10: from numpy.distutils.misc_util import Configuration
11: from numpy.distutils.system_info import get_info
12: config = Configuration('integrate', parent_package, top_path)
13:
14: # Get a local copy of lapack_opt_info
15: lapack_opt = dict(get_info('lapack_opt',notfound_action=2))
16: # Pop off the libraries list so it can be combined with
17: # additional required libraries
18: lapack_libs = lapack_opt.pop('libraries', [])
19:
20: mach_src = [join('mach','*.f')]
21: quadpack_src = [join('quadpack', '*.f')]
22: lsoda_src = [join('odepack', fn) for fn in [
23: 'blkdta000.f', 'bnorm.f', 'cfode.f',
24: 'ewset.f', 'fnorm.f', 'intdy.f',
25: 'lsoda.f', 'prja.f', 'solsy.f', 'srcma.f',
26: 'stoda.f', 'vmnorm.f', 'xerrwv.f', 'xsetf.f',
27: 'xsetun.f']]
28: vode_src = [join('odepack', 'vode.f'), join('odepack', 'zvode.f')]
29: dop_src = [join('dop','*.f')]
30: quadpack_test_src = [join('tests','_test_multivariate.c')]
31: odeint_banded_test_src = [join('tests', 'banded5x5.f')]
32:
33: config.add_library('mach', sources=mach_src,
34: config_fc={'noopt':(__file__,1)})
35: config.add_library('quadpack', sources=quadpack_src)
36: config.add_library('lsoda', sources=lsoda_src)
37: config.add_library('vode', sources=vode_src)
38: config.add_library('dop', sources=dop_src)
39:
40: # Extensions
41: # quadpack:
42: include_dirs = [join(os.path.dirname(__file__), '..', '_lib','src')]
43: if 'include_dirs' in lapack_opt:
44: lapack_opt = dict(lapack_opt)
45: include_dirs.extend(lapack_opt.pop('include_dirs'))
46:
47: config.add_extension('_quadpack',
48: sources=['_quadpackmodule.c'],
49: libraries=['quadpack','mach'] + lapack_libs,
50: depends=(['__quadpack.h']
51: + quadpack_src + mach_src),
52: include_dirs=include_dirs,
53: **lapack_opt)
54:
55: # odepack/lsoda-odeint
56: odepack_opts = lapack_opt.copy()
57: odepack_opts.update(numpy_nodepr_api)
58: config.add_extension('_odepack',
59: sources=['_odepackmodule.c'],
60: libraries=['lsoda','mach'] + lapack_libs,
61: depends=(lsoda_src + mach_src),
62: **odepack_opts)
63:
64: # vode
65: config.add_extension('vode',
66: sources=['vode.pyf'],
67: libraries=['vode'] + lapack_libs,
68: depends=vode_src,
69: **lapack_opt)
70:
71: # lsoda
72: config.add_extension('lsoda',
73: sources=['lsoda.pyf'],
74: libraries=['lsoda','mach'] + lapack_libs,
75: depends=(lsoda_src + mach_src),
76: **lapack_opt)
77:
78: # dop
79: config.add_extension('_dop',
80: sources=['dop.pyf'],
81: libraries=['dop'],
82: depends=dop_src)
83:
84: config.add_extension('_test_multivariate',
85: sources=quadpack_test_src)
86:
87: # Fortran+f2py extension module for testing odeint.
88: config.add_extension('_test_odeint_banded',
89: sources=odeint_banded_test_src,
90: libraries=['lsoda','mach'] + lapack_libs,
91: depends=(lsoda_src + mach_src),
92: **lapack_opt)
93:
94: config.add_subpackage('_ivp')
95:
96: config.add_data_dir('tests')
97: return config
98:
99:
100: if __name__ == '__main__':
101: from numpy.distutils.core import setup
102: setup(**configuration(top_path='').todict())
103:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
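# The Context appears to act as stypy's per-scope symbol table: the generated
# statements below mirror the original program line by line, recording the
# inferred type of every name at its source Localization.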
# ################# Begin of the type inference program ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 3, 0))
# 'import os' statement (line 3)
import os
import_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'os', os, module_type_store)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 4, 0))
# 'from os.path import join' statement (line 4)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32066 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path')
if (type(import_32066) is not StypyTypeError):
    if (import_32066 != 'pyd_module'):
__import__(import_32066)
sys_modules_32067 = sys.modules[import_32066]
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', sys_modules_32067.module_type_store, module_type_store, ['join'])
nest_module(stypy.reporting.localization.Localization(__file__, 4, 0), __file__, sys_modules_32067, sys_modules_32067.module_type_store, module_type_store)
else:
from os.path import join
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', None, module_type_store, ['join'], [join])
else:
# Assigning a type to the variable 'os.path' (line 4)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', import_32066)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 6, 0))
# 'from scipy._build_utils import numpy_nodepr_api' statement (line 6)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32068 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils')
if (type(import_32068) is not StypyTypeError):
    if (import_32068 != 'pyd_module'):
        __import__(import_32068)
        sys_modules_32069 = sys.modules[import_32068]
        import_from_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', sys_modules_32069.module_type_store, module_type_store, ['numpy_nodepr_api'])
        nest_module(stypy.reporting.localization.Localization(__file__, 6, 0), __file__, sys_modules_32069, sys_modules_32069.module_type_store, module_type_store)
    else:
        from scipy._build_utils import numpy_nodepr_api
        import_from_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', None, module_type_store, ['numpy_nodepr_api'], [numpy_nodepr_api])
else:
    # Assigning a type to the variable 'scipy._build_utils' (line 6)
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', import_32068)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
@norecursion
def configuration(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
    str_32070 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 9, 33), 'str', '')
# Getting the type of 'None' (line 9)
None_32071 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 45), 'None')
defaults = [str_32070, None_32071]
# Create a new context for function 'configuration'
module_type_store = module_type_store.open_function_context('configuration', 9, 0, False)
# Passed parameters checking function
configuration.stypy_localization = localization
configuration.stypy_type_of_self = None
configuration.stypy_type_store = module_type_store
configuration.stypy_function_name = 'configuration'
configuration.stypy_param_names_list = ['parent_package', 'top_path']
configuration.stypy_varargs_param_name = None
configuration.stypy_kwargs_param_name = None
configuration.stypy_call_defaults = defaults
configuration.stypy_call_varargs = varargs
configuration.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'configuration', ['parent_package', 'top_path'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'configuration', localization, ['parent_package', 'top_path'], arguments)
# Default return type storage variable (SSA)
    # Assigning a type to the variable 'stypy_return_type'
    module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
# ################# Begin of 'configuration(...)' code ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 10, 4))
# 'from numpy.distutils.misc_util import Configuration' statement (line 10)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32072 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util')
if (type(import_32072) is not StypyTypeError):
    if (import_32072 != 'pyd_module'):
__import__(import_32072)
sys_modules_32073 = sys.modules[import_32072]
import_from_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', sys_modules_32073.module_type_store, module_type_store, ['Configuration'])
nest_module(stypy.reporting.localization.Localization(__file__, 10, 4), __file__, sys_modules_32073, sys_modules_32073.module_type_store, module_type_store)
else:
from numpy.distutils.misc_util import Configuration
import_from_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', None, module_type_store, ['Configuration'], [Configuration])
else:
# Assigning a type to the variable 'numpy.distutils.misc_util' (line 10)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', import_32072)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 11, 4))
# 'from numpy.distutils.system_info import get_info' statement (line 11)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32074 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info')
if (type(import_32074) is not StypyTypeError):
    if (import_32074 != 'pyd_module'):
__import__(import_32074)
sys_modules_32075 = sys.modules[import_32074]
import_from_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', sys_modules_32075.module_type_store, module_type_store, ['get_info'])
nest_module(stypy.reporting.localization.Localization(__file__, 11, 4), __file__, sys_modules_32075, sys_modules_32075.module_type_store, module_type_store)
else:
from numpy.distutils.system_info import get_info
import_from_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', None, module_type_store, ['get_info'], [get_info])
else:
# Assigning a type to the variable 'numpy.distutils.system_info' (line 11)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', import_32074)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
# Assigning a Call to a Name (line 12):
# Call to Configuration(...): (line 12)
# Processing the call arguments (line 12)
str_32077 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 12, 27), 'str', 'integrate')
# Getting the type of 'parent_package' (line 12)
parent_package_32078 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 40), 'parent_package', False)
# Getting the type of 'top_path' (line 12)
top_path_32079 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 56), 'top_path', False)
# Processing the call keyword arguments (line 12)
kwargs_32080 = {}
# Getting the type of 'Configuration' (line 12)
Configuration_32076 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 13), 'Configuration', False)
# Calling Configuration(args, kwargs) (line 12)
Configuration_call_result_32081 = invoke(stypy.reporting.localization.Localization(__file__, 12, 13), Configuration_32076, *[str_32077, parent_package_32078, top_path_32079], **kwargs_32080)
# Assigning a type to the variable 'config' (line 12)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 12, 4), 'config', Configuration_call_result_32081)
# Assigning a Call to a Name (line 15):
# Call to dict(...): (line 15)
# Processing the call arguments (line 15)
# Call to get_info(...): (line 15)
# Processing the call arguments (line 15)
str_32084 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 15, 31), 'str', 'lapack_opt')
# Processing the call keyword arguments (line 15)
int_32085 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 15, 60), 'int')
keyword_32086 = int_32085
kwargs_32087 = {'notfound_action': keyword_32086}
# Getting the type of 'get_info' (line 15)
get_info_32083 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 15, 22), 'get_info', False)
# Calling get_info(args, kwargs) (line 15)
get_info_call_result_32088 = invoke(stypy.reporting.localization.Localization(__file__, 15, 22), get_info_32083, *[str_32084], **kwargs_32087)
# Processing the call keyword arguments (line 15)
kwargs_32089 = {}
# Getting the type of 'dict' (line 15)
dict_32082 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 15, 17), 'dict', False)
# Calling dict(args, kwargs) (line 15)
dict_call_result_32090 = invoke(stypy.reporting.localization.Localization(__file__, 15, 17), dict_32082, *[get_info_call_result_32088], **kwargs_32089)
# Assigning a type to the variable 'lapack_opt' (line 15)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 15, 4), 'lapack_opt', dict_call_result_32090)
# Assigning a Call to a Name (line 18):
# Call to pop(...): (line 18)
# Processing the call arguments (line 18)
str_32093 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 33), 'str', 'libraries')
# Obtaining an instance of the builtin type 'list' (line 18)
list_32094 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 46), 'list')
# Adding type elements to the builtin type 'list' instance (line 18)
# Processing the call keyword arguments (line 18)
kwargs_32095 = {}
# Getting the type of 'lapack_opt' (line 18)
lapack_opt_32091 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 18, 18), 'lapack_opt', False)
# Obtaining the member 'pop' of a type (line 18)
pop_32092 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 18, 18), lapack_opt_32091, 'pop')
# Calling pop(args, kwargs) (line 18)
pop_call_result_32096 = invoke(stypy.reporting.localization.Localization(__file__, 18, 18), pop_32092, *[str_32093, list_32094], **kwargs_32095)
# Assigning a type to the variable 'lapack_libs' (line 18)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 18, 4), 'lapack_libs', pop_call_result_32096)
# Assigning a List to a Name (line 20):
# Obtaining an instance of the builtin type 'list' (line 20)
list_32097 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 15), 'list')
# Adding type elements to the builtin type 'list' instance (line 20)
# Adding element type (line 20)
# Call to join(...): (line 20)
# Processing the call arguments (line 20)
str_32099 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 21), 'str', 'mach')
str_32100 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 28), 'str', '*.f')
# Processing the call keyword arguments (line 20)
kwargs_32101 = {}
# Getting the type of 'join' (line 20)
join_32098 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 20, 16), 'join', False)
# Calling join(args, kwargs) (line 20)
join_call_result_32102 = invoke(stypy.reporting.localization.Localization(__file__, 20, 16), join_32098, *[str_32099, str_32100], **kwargs_32101)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 15), list_32097, join_call_result_32102)
# Assigning a type to the variable 'mach_src' (line 20)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 20, 4), 'mach_src', list_32097)
# Assigning a List to a Name (line 21):
# Obtaining an instance of the builtin type 'list' (line 21)
list_32103 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 19), 'list')
# Adding type elements to the builtin type 'list' instance (line 21)
# Adding element type (line 21)
# Call to join(...): (line 21)
# Processing the call arguments (line 21)
str_32105 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 25), 'str', 'quadpack')
str_32106 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 37), 'str', '*.f')
# Processing the call keyword arguments (line 21)
kwargs_32107 = {}
# Getting the type of 'join' (line 21)
join_32104 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 21, 20), 'join', False)
# Calling join(args, kwargs) (line 21)
join_call_result_32108 = invoke(stypy.reporting.localization.Localization(__file__, 21, 20), join_32104, *[str_32105, str_32106], **kwargs_32107)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 21, 19), list_32103, join_call_result_32108)
# Assigning a type to the variable 'quadpack_src' (line 21)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 21, 4), 'quadpack_src', list_32103)
# Assigning a ListComp to a Name (line 22):
# Calculating list comprehension
# Calculating comprehension expression
# Obtaining an instance of the builtin type 'list' (line 22)
list_32114 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 47), 'list')
# Adding type elements to the builtin type 'list' instance (line 22)
# Adding element type (line 22)
str_32115 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 8), 'str', 'blkdta000.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32115)
# Adding element type (line 22)
str_32116 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 23), 'str', 'bnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32116)
# Adding element type (line 22)
str_32117 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 34), 'str', 'cfode.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32117)
# Adding element type (line 22)
str_32118 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 8), 'str', 'ewset.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32118)
# Adding element type (line 22)
str_32119 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 19), 'str', 'fnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32119)
# Adding element type (line 22)
str_32120 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 30), 'str', 'intdy.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32120)
# Adding element type (line 22)
str_32121 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 8), 'str', 'lsoda.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32121)
# Adding element type (line 22)
str_32122 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 19), 'str', 'prja.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32122)
# Adding element type (line 22)
str_32123 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 29), 'str', 'solsy.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32123)
# Adding element type (line 22)
str_32124 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 40), 'str', 'srcma.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32124)
# Adding element type (line 22)
str_32125 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 8), 'str', 'stoda.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32125)
# Adding element type (line 22)
str_32126 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 19), 'str', 'vmnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32126)
# Adding element type (line 22)
str_32127 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 31), 'str', 'xerrwv.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32127)
# Adding element type (line 22)
str_32128 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 43), 'str', 'xsetf.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32128)
# Adding element type (line 22)
str_32129 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 27, 8), 'str', 'xsetun.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32129)
comprehension_32130 = get_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 17), list_32114)
# Assigning a type to the variable 'fn' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 17), 'fn', comprehension_32130)
# Call to join(...): (line 22)
# Processing the call arguments (line 22)
str_32110 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 22), 'str', 'odepack')
# Getting the type of 'fn' (line 22)
fn_32111 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 33), 'fn', False)
# Processing the call keyword arguments (line 22)
kwargs_32112 = {}
# Getting the type of 'join' (line 22)
join_32109 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 17), 'join', False)
# Calling join(args, kwargs) (line 22)
join_call_result_32113 = invoke(stypy.reporting.localization.Localization(__file__, 22, 17), join_32109, *[str_32110, fn_32111], **kwargs_32112)
list_32131 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 17), 'list')
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 17), list_32131, join_call_result_32113)
# Assigning a type to the variable 'lsoda_src' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 4), 'lsoda_src', list_32131)
# Assigning a List to a Name (line 28):
# Obtaining an instance of the builtin type 'list' (line 28)
list_32132 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 15), 'list')
# Adding type elements to the builtin type 'list' instance (line 28)
# Adding element type (line 28)
# Call to join(...): (line 28)
# Processing the call arguments (line 28)
str_32134 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 21), 'str', 'odepack')
str_32135 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 32), 'str', 'vode.f')
# Processing the call keyword arguments (line 28)
kwargs_32136 = {}
# Getting the type of 'join' (line 28)
join_32133 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 28, 16), 'join', False)
# Calling join(args, kwargs) (line 28)
join_call_result_32137 = invoke(stypy.reporting.localization.Localization(__file__, 28, 16), join_32133, *[str_32134, str_32135], **kwargs_32136)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 28, 15), list_32132, join_call_result_32137)
# Adding element type (line 28)
# Call to join(...): (line 28)
# Processing the call arguments (line 28)
str_32139 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 48), 'str', 'odepack')
str_32140 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 59), 'str', 'zvode.f')
# Processing the call keyword arguments (line 28)
kwargs_32141 = {}
# Getting the type of 'join' (line 28)
join_32138 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 28, 43), 'join', False)
# Calling join(args, kwargs) (line 28)
join_call_result_32142 = invoke(stypy.reporting.localization.Localization(__file__, 28, 43), join_32138, *[str_32139, str_32140], **kwargs_32141)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 28, 15), list_32132, join_call_result_32142)
# Assigning a type to the variable 'vode_src' (line 28)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 28, 4), 'vode_src', list_32132)
# Assigning a List to a Name (line 29):
# Obtaining an instance of the builtin type 'list' (line 29)
list_32143 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 14), 'list')
# Adding type elements to the builtin type 'list' instance (line 29)
# Adding element type (line 29)
# Call to join(...): (line 29)
# Processing the call arguments (line 29)
str_32145 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 20), 'str', 'dop')
str_32146 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 26), 'str', '*.f')
# Processing the call keyword arguments (line 29)
kwargs_32147 = {}
# Getting the type of 'join' (line 29)
join_32144 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 15), 'join', False)
# Calling join(args, kwargs) (line 29)
join_call_result_32148 = invoke(stypy.reporting.localization.Localization(__file__, 29, 15), join_32144, *[str_32145, str_32146], **kwargs_32147)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 29, 14), list_32143, join_call_result_32148)
# Assigning a type to the variable 'dop_src' (line 29)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 29, 4), 'dop_src', list_32143)
# Assigning a List to a Name (line 30):
# Obtaining an instance of the builtin type 'list' (line 30)
list_32149 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 24), 'list')
# Adding type elements to the builtin type 'list' instance (line 30)
# Adding element type (line 30)
# Call to join(...): (line 30)
# Processing the call arguments (line 30)
str_32151 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 30), 'str', 'tests')
str_32152 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 38), 'str', '_test_multivariate.c')
# Processing the call keyword arguments (line 30)
kwargs_32153 = {}
# Getting the type of 'join' (line 30)
join_32150 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 30, 25), 'join', False)
# Calling join(args, kwargs) (line 30)
join_call_result_32154 = invoke(stypy.reporting.localization.Localization(__file__, 30, 25), join_32150, *[str_32151, str_32152], **kwargs_32153)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 30, 24), list_32149, join_call_result_32154)
# Assigning a type to the variable 'quadpack_test_src' (line 30)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 30, 4), 'quadpack_test_src', list_32149)
# Assigning a List to a Name (line 31):
# Obtaining an instance of the builtin type 'list' (line 31)
list_32155 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 29), 'list')
# Adding type elements to the builtin type 'list' instance (line 31)
# Adding element type (line 31)
# Call to join(...): (line 31)
# Processing the call arguments (line 31)
str_32157 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 35), 'str', 'tests')
str_32158 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 44), 'str', 'banded5x5.f')
# Processing the call keyword arguments (line 31)
kwargs_32159 = {}
# Getting the type of 'join' (line 31)
join_32156 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 31, 30), 'join', False)
# Calling join(args, kwargs) (line 31)
join_call_result_32160 = invoke(stypy.reporting.localization.Localization(__file__, 31, 30), join_32156, *[str_32157, str_32158], **kwargs_32159)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 31, 29), list_32155, join_call_result_32160)
# Assigning a type to the variable 'odeint_banded_test_src' (line 31)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 31, 4), 'odeint_banded_test_src', list_32155)
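# Reader's note (added; not produced by stypy): the type-record blocks above
# encode, statement by statement, roughly this fragment of the original
# numpy.distutils-style setup script (source lines 15-31, reconstructed from
# the Localization coordinates; the notfound_action literal is recorded only
# as an int, so its value is not shown):
#
#     lapack_opt = dict(get_info('lapack_opt', notfound_action=<int>))
#     lapack_libs = lapack_opt.pop('libraries', [])
#     mach_src = [join('mach', '*.f')]
#     quadpack_src = [join('quadpack', '*.f')]
#     lsoda_src = [join('odepack', fn) for fn in [
#         'blkdta000.f', 'bnorm.f', 'cfode.f', 'ewset.f', 'fnorm.f',
#         'intdy.f', 'lsoda.f', 'prja.f', 'solsy.f', 'srcma.f',
#         'stoda.f', 'vmnorm.f', 'xerrwv.f', 'xsetf.f', 'xsetun.f']]
#     vode_src = [join('odepack', 'vode.f'), join('odepack', 'zvode.f')]
#     dop_src = [join('dop', '*.f')]
#     quadpack_test_src = [join('tests', '_test_multivariate.c')]
#     odeint_banded_test_src = [join('tests', 'banded5x5.f')]
#
# The blocks that follow encode the corresponding config.add_library(...)
# calls for 'mach' (with a config_fc={'noopt': (__file__, <int>)} keyword),
# 'quadpack', 'lsoda', 'vode', and 'dop'.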
# Call to add_library(...): (line 33)
# Processing the call arguments (line 33)
str_32163 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 33, 23), 'str', 'mach')
# Processing the call keyword arguments (line 33)
# Getting the type of 'mach_src' (line 33)
mach_src_32164 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 33, 39), 'mach_src', False)
keyword_32165 = mach_src_32164
# Obtaining an instance of the builtin type 'dict' (line 34)
dict_32166 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 33), 'dict')
# Adding type elements to the builtin type 'dict' instance (line 34)
# Adding element type (key, value) (line 34)
str_32167 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 34), 'str', 'noopt')
# Obtaining an instance of the builtin type 'tuple' (line 34)
tuple_32168 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 43), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 34)
# Adding element type (line 34)
# Getting the type of '__file__' (line 34)
file___32169 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 34, 43), '__file__', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 43), tuple_32168, file___32169)
# Adding element type (line 34)
int_32170 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 52), 'int')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 43), tuple_32168, int_32170)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 33), dict_32166, (str_32167, tuple_32168))
keyword_32171 = dict_32166
kwargs_32172 = {'sources': keyword_32165, 'config_fc': keyword_32171}
# Getting the type of 'config' (line 33)
config_32161 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 33, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 33)
add_library_32162 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 33, 4), config_32161, 'add_library')
# Calling add_library(args, kwargs) (line 33)
add_library_call_result_32173 = invoke(stypy.reporting.localization.Localization(__file__, 33, 4), add_library_32162, *[str_32163], **kwargs_32172)
# Call to add_library(...): (line 35)
# Processing the call arguments (line 35)
str_32176 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 35, 23), 'str', 'quadpack')
# Processing the call keyword arguments (line 35)
# Getting the type of 'quadpack_src' (line 35)
quadpack_src_32177 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 43), 'quadpack_src', False)
keyword_32178 = quadpack_src_32177
kwargs_32179 = {'sources': keyword_32178}
# Getting the type of 'config' (line 35)
config_32174 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 35)
add_library_32175 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 35, 4), config_32174, 'add_library')
# Calling add_library(args, kwargs) (line 35)
add_library_call_result_32180 = invoke(stypy.reporting.localization.Localization(__file__, 35, 4), add_library_32175, *[str_32176], **kwargs_32179)
# Call to add_library(...): (line 36)
# Processing the call arguments (line 36)
str_32183 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 23), 'str', 'lsoda')
# Processing the call keyword arguments (line 36)
# Getting the type of 'lsoda_src' (line 36)
lsoda_src_32184 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 40), 'lsoda_src', False)
keyword_32185 = lsoda_src_32184
kwargs_32186 = {'sources': keyword_32185}
# Getting the type of 'config' (line 36)
config_32181 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 36)
add_library_32182 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 36, 4), config_32181, 'add_library')
# Calling add_library(args, kwargs) (line 36)
add_library_call_result_32187 = invoke(stypy.reporting.localization.Localization(__file__, 36, 4), add_library_32182, *[str_32183], **kwargs_32186)
# Call to add_library(...): (line 37)
# Processing the call arguments (line 37)
str_32190 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 23), 'str', 'vode')
# Processing the call keyword arguments (line 37)
# Getting the type of 'vode_src' (line 37)
vode_src_32191 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 39), 'vode_src', False)
keyword_32192 = vode_src_32191
kwargs_32193 = {'sources': keyword_32192}
# Getting the type of 'config' (line 37)
config_32188 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 37)
add_library_32189 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 4), config_32188, 'add_library')
# Calling add_library(args, kwargs) (line 37)
add_library_call_result_32194 = invoke(stypy.reporting.localization.Localization(__file__, 37, 4), add_library_32189, *[str_32190], **kwargs_32193)
# Call to add_library(...): (line 38)
# Processing the call arguments (line 38)
str_32197 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 38, 23), 'str', 'dop')
# Processing the call keyword arguments (line 38)
# Getting the type of 'dop_src' (line 38)
dop_src_32198 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 38), 'dop_src', False)
keyword_32199 = dop_src_32198
kwargs_32200 = {'sources': keyword_32199}
# Getting the type of 'config' (line 38)
config_32195 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 38)
add_library_32196 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 38, 4), config_32195, 'add_library')
# Calling add_library(args, kwargs) (line 38)
add_library_call_result_32201 = invoke(stypy.reporting.localization.Localization(__file__, 38, 4), add_library_32196, *[str_32197], **kwargs_32200)
# orm/relationships.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Heuristics related to join conditions as used in
:func:`_orm.relationship`.
Provides the :class:`.JoinCondition` object, which encapsulates
SQL annotation and aliasing behavior focused on the `primaryjoin`
and `secondaryjoin` aspects of :func:`_orm.relationship`.
"""
from __future__ import annotations
import collections
from collections import abc
import dataclasses
import inspect as _py_inspect
import re
import typing
from typing import Any
from typing import Callable
from typing import cast
from typing import Collection
from typing import Dict
from typing import Generic
from typing import Iterable
from typing import Iterator
from typing import List
from typing import NamedTuple
from typing import NoReturn
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
import weakref
from . import attributes
from . import strategy_options
from ._typing import insp_is_aliased_class
from ._typing import is_has_collection_adapter
from .base import _DeclarativeMapped
from .base import _is_mapped_class
from .base import class_mapper
from .base import DynamicMapped
from .base import LoaderCallableStatus
from .base import PassiveFlag
from .base import state_str
from .base import WriteOnlyMapped
from .interfaces import _AttributeOptions
from .interfaces import _IntrospectsAnnotations
from .interfaces import MANYTOMANY
from .interfaces import MANYTOONE
from .interfaces import ONETOMANY
from .interfaces import PropComparator
from .interfaces import RelationshipDirection
from .interfaces import StrategizedProperty
from .util import _orm_annotate
from .util import _orm_deannotate
from .util import CascadeOptions
from .. import exc as sa_exc
from .. import Exists
from .. import log
from .. import schema
from .. import sql
from .. import util
from ..inspection import inspect
from ..sql import coercions
from ..sql import expression
from ..sql import operators
from ..sql import roles
from ..sql import visitors
from ..sql._typing import _ColumnExpressionArgument
from ..sql._typing import _HasClauseElement
from ..sql.annotation import _safe_annotate
from ..sql.elements import ColumnClause
from ..sql.elements import ColumnElement
from ..sql.util import _deep_annotate
from ..sql.util import _deep_deannotate
from ..sql.util import _shallow_annotate
from ..sql.util import adapt_criterion_to_null
from ..sql.util import ClauseAdapter
from ..sql.util import join_condition
from ..sql.util import selectables_overlap
from ..sql.util import visit_binary_product
from ..util.typing import de_optionalize_union_types
from ..util.typing import Literal
from ..util.typing import resolve_name_to_real_class_name
if typing.TYPE_CHECKING:
    from ._typing import _EntityType
    from ._typing import _ExternalEntityType
    from ._typing import _IdentityKeyType
    from ._typing import _InstanceDict
    from ._typing import _InternalEntityType
    from ._typing import _O
    from ._typing import _RegistryType
    from .base import Mapped
    from .clsregistry import _class_resolver
    from .clsregistry import _ModNS
    from .decl_base import _ClassScanMapperConfig
    from .dependency import DependencyProcessor
    from .mapper import Mapper
    from .query import Query
    from .session import Session
    from .state import InstanceState
    from .strategies import LazyLoader
    from .util import AliasedClass
    from .util import AliasedInsp
    from ..sql._typing import _CoreAdapterProto
    from ..sql._typing import _EquivalentColumnMap
    from ..sql._typing import _InfoType
    from ..sql.annotation import _AnnotationDict
    from ..sql.annotation import SupportsAnnotations
    from ..sql.elements import BinaryExpression
    from ..sql.elements import BindParameter
    from ..sql.elements import ClauseElement
    from ..sql.schema import Table
    from ..sql.selectable import FromClause
    from ..util.typing import _AnnotationScanType
    from ..util.typing import RODescriptorReference
_T = TypeVar("_T", bound=Any)
_T1 = TypeVar("_T1", bound=Any)
_T2 = TypeVar("_T2", bound=Any)
_PT = TypeVar("_PT", bound=Any)
_PT2 = TypeVar("_PT2", bound=Any)
_RelationshipArgumentType = Union[
str,
Type[_T],
Callable[[], Type[_T]],
"Mapper[_T]",
"AliasedClass[_T]",
Callable[[], "Mapper[_T]"],
Callable[[], "AliasedClass[_T]"],
]
_LazyLoadArgumentType = Literal[
"select",
"joined",
"selectin",
"subquery",
"raise",
"raise_on_sql",
"noload",
"immediate",
"write_only",
"dynamic",
True,
False,
None,
]
_RelationshipJoinConditionArgument = Union[
str, _ColumnExpressionArgument[bool]
]
_RelationshipSecondaryArgument = Union[
"FromClause", str, Callable[[], "FromClause"]
]
_ORMOrderByArgument = Union[
Literal[False],
str,
_ColumnExpressionArgument[Any],
Callable[[], _ColumnExpressionArgument[Any]],
Callable[[], Iterable[_ColumnExpressionArgument[Any]]],
Iterable[Union[str, _ColumnExpressionArgument[Any]]],
]
ORMBackrefArgument = Union[str, Tuple[str, Dict[str, Any]]]
_ORMColCollectionElement = Union[
ColumnClause[Any], _HasClauseElement, roles.DMLColumnRole, "Mapped[Any]"
]
_ORMColCollectionArgument = Union[
str,
Sequence[_ORMColCollectionElement],
Callable[[], Sequence[_ORMColCollectionElement]],
Callable[[], _ORMColCollectionElement],
_ORMColCollectionElement,
]
_CEA = TypeVar("_CEA", bound=_ColumnExpressionArgument[Any])
_CE = TypeVar("_CE", bound="ColumnElement[Any]")
_ColumnPairIterable = Iterable[Tuple[ColumnElement[Any], ColumnElement[Any]]]
_ColumnPairs = Sequence[Tuple[ColumnElement[Any], ColumnElement[Any]]]
_MutableColumnPairs = List[Tuple[ColumnElement[Any], ColumnElement[Any]]]
def remote(expr: _CEA) -> _CEA:
"""Annotate a portion of a primaryjoin expression
with a 'remote' annotation.
See the section :ref:`relationship_custom_foreign` for a
description of use.
.. seealso::
:ref:`relationship_custom_foreign`
:func:`.foreign`
"""
return _annotate_columns( # type: ignore
coercions.expect(roles.ColumnArgumentRole, expr), {"remote": True}
)
def foreign(expr: _CEA) -> _CEA:
"""Annotate a portion of a primaryjoin expression
with a 'foreign' annotation.
See the section :ref:`relationship_custom_foreign` for a
description of use.
.. seealso::
:ref:`relationship_custom_foreign`
:func:`.remote`
"""
return _annotate_columns( # type: ignore
coercions.expect(roles.ColumnArgumentRole, expr), {"foreign": True}
)
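# Illustrative usage (added sketch, not part of this module): ``foreign()``
# and ``remote()`` let a custom primaryjoin declare the column roles that the
# heuristics in this module would otherwise infer from ForeignKey metadata.
# For a hypothetical self-referential ``Node`` mapping, the adjacency
# relationship could be spelled as:
#
#     parent = relationship(
#         "Node",
#         primaryjoin=remote(Node.id) == foreign(Node.parent_id),
#     )
#
# which is the annotation-based equivalent of passing remote_side=[Node.id].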
@dataclasses.dataclass
class _RelationshipArg(Generic[_T1, _T2]):
"""stores a user-defined parameter value that must be resolved and
parsed later at mapper configuration time.
"""
__slots__ = "name", "argument", "resolved"
name: str
argument: _T1
resolved: Optional[_T2]
def _is_populated(self) -> bool:
return self.argument is not None
def _resolve_against_registry(
self, clsregistry_resolver: Callable[[str, bool], _class_resolver]
) -> None:
attr_value = self.argument
if isinstance(attr_value, str):
self.resolved = clsregistry_resolver(
attr_value, self.name == "secondary"
)()
elif callable(attr_value) and not _is_mapped_class(attr_value):
self.resolved = attr_value()
else:
self.resolved = attr_value
class _RelationshipArgs(NamedTuple):
"""stores user-passed parameters that are resolved at mapper configuration
time.
"""
secondary: _RelationshipArg[
Optional[_RelationshipSecondaryArgument],
Optional[FromClause],
]
primaryjoin: _RelationshipArg[
Optional[_RelationshipJoinConditionArgument],
Optional[ColumnElement[Any]],
]
secondaryjoin: _RelationshipArg[
Optional[_RelationshipJoinConditionArgument],
Optional[ColumnElement[Any]],
]
order_by: _RelationshipArg[
_ORMOrderByArgument,
Union[Literal[None, False], Tuple[ColumnElement[Any], ...]],
]
foreign_keys: _RelationshipArg[
Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
]
remote_side: _RelationshipArg[
Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
]
@log.class_logger
class RelationshipProperty(
_IntrospectsAnnotations, StrategizedProperty[_T], log.Identified
):
"""Describes an object property that holds a single item or list
of items that correspond to a related database table.
Public constructor is the :func:`_orm.relationship` function.
.. seealso::
:ref:`relationship_config_toplevel`
"""
strategy_wildcard_key = strategy_options._RELATIONSHIP_TOKEN
inherit_cache = True
""":meta private:"""
_links_to_entity = True
_is_relationship = True
_overlaps: Sequence[str]
_lazy_strategy: LazyLoader
_persistence_only = dict(
passive_deletes=False,
passive_updates=True,
enable_typechecks=True,
active_history=False,
cascade_backrefs=False,
)
_dependency_processor: Optional[DependencyProcessor] = None
primaryjoin: ColumnElement[bool]
secondaryjoin: Optional[ColumnElement[bool]]
secondary: Optional[FromClause]
_join_condition: JoinCondition
order_by: Union[Literal[False], Tuple[ColumnElement[Any], ...]]
_user_defined_foreign_keys: Set[ColumnElement[Any]]
_calculated_foreign_keys: Set[ColumnElement[Any]]
remote_side: Set[ColumnElement[Any]]
local_columns: Set[ColumnElement[Any]]
synchronize_pairs: _ColumnPairs
secondary_synchronize_pairs: Optional[_ColumnPairs]
local_remote_pairs: Optional[_ColumnPairs]
direction: RelationshipDirection
_init_args: _RelationshipArgs
def __init__(
self,
argument: Optional[_RelationshipArgumentType[_T]] = None,
secondary: Optional[_RelationshipSecondaryArgument] = None,
*,
uselist: Optional[bool] = None,
collection_class: Optional[
Union[Type[Collection[Any]], Callable[[], Collection[Any]]]
] = None,
primaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
back_populates: Optional[str] = None,
order_by: _ORMOrderByArgument = False,
backref: Optional[ORMBackrefArgument] = None,
overlaps: Optional[str] = None,
post_update: bool = False,
cascade: str = "save-update, merge",
viewonly: bool = False,
attribute_options: Optional[_AttributeOptions] = None,
lazy: _LazyLoadArgumentType = "select",
passive_deletes: Union[Literal["all"], bool] = False,
passive_updates: bool = True,
active_history: bool = False,
enable_typechecks: bool = True,
foreign_keys: Optional[_ORMColCollectionArgument] = None,
remote_side: Optional[_ORMColCollectionArgument] = None,
join_depth: Optional[int] = None,
comparator_factory: Optional[
Type[RelationshipProperty.Comparator[Any]]
] = None,
single_parent: bool = False,
innerjoin: bool = False,
distinct_target_key: Optional[bool] = None,
load_on_pending: bool = False,
query_class: Optional[Type[Query[Any]]] = None,
info: Optional[_InfoType] = None,
omit_join: Literal[None, False] = None,
sync_backref: Optional[bool] = None,
doc: Optional[str] = None,
bake_queries: Literal[True] = True,
cascade_backrefs: Literal[False] = False,
_local_remote_pairs: Optional[_ColumnPairs] = None,
_legacy_inactive_history_style: bool = False,
):
super().__init__(attribute_options=attribute_options)
self.uselist = uselist
self.argument = argument
self._init_args = _RelationshipArgs(
_RelationshipArg("secondary", secondary, None),
_RelationshipArg("primaryjoin", primaryjoin, None),
_RelationshipArg("secondaryjoin", secondaryjoin, None),
_RelationshipArg("order_by", order_by, None),
_RelationshipArg("foreign_keys", foreign_keys, None),
_RelationshipArg("remote_side", remote_side, None),
)
self.post_update = post_update
self.viewonly = viewonly
if viewonly:
self._warn_for_persistence_only_flags(
passive_deletes=passive_deletes,
passive_updates=passive_updates,
enable_typechecks=enable_typechecks,
active_history=active_history,
cascade_backrefs=cascade_backrefs,
)
if viewonly and sync_backref:
raise sa_exc.ArgumentError(
"sync_backref and viewonly cannot both be True"
)
self.sync_backref = sync_backref
self.lazy = lazy
self.single_parent = single_parent
self.collection_class = collection_class
self.passive_deletes = passive_deletes
if cascade_backrefs:
raise sa_exc.ArgumentError(
"The 'cascade_backrefs' parameter passed to "
"relationship() may only be set to False."
)
self.passive_updates = passive_updates
self.enable_typechecks = enable_typechecks
self.query_class = query_class
self.innerjoin = innerjoin
self.distinct_target_key = distinct_target_key
self.doc = doc
self.active_history = active_history
self._legacy_inactive_history_style = _legacy_inactive_history_style
self.join_depth = join_depth
if omit_join:
util.warn(
"setting omit_join to True is not supported; selectin "
"loading of this relationship may not work correctly if this "
"flag is set explicitly. omit_join optimization is "
"automatically detected for conditions under which it is "
"supported."
)
self.omit_join = omit_join
self.local_remote_pairs = _local_remote_pairs
self.load_on_pending = load_on_pending
self.comparator_factory = (
comparator_factory or RelationshipProperty.Comparator
)
util.set_creation_order(self)
if info is not None:
self.info.update(info)
self.strategy_key = (("lazy", self.lazy),)
self._reverse_property: Set[RelationshipProperty[Any]] = set()
if overlaps:
self._overlaps = set(re.split(r"\s*,\s*", overlaps)) # type: ignore # noqa: E501
else:
self._overlaps = ()
# mypy ignoring the @property setter
self.cascade = cascade # type: ignore
self.back_populates = back_populates
if self.back_populates:
if backref:
raise sa_exc.ArgumentError(
"backref and back_populates keyword arguments "
"are mutually exclusive"
)
self.backref = None
else:
self.backref = backref
def _warn_for_persistence_only_flags(self, **kw: Any) -> None:
for k, v in kw.items():
if v != self._persistence_only[k]:
# we are warning here rather than warn deprecated as this is a
# configuration mistake, and Python shows regular warnings more
# aggressively than deprecation warnings by default. Unlike the
# case of setting viewonly with cascade, the settings being
# warned about here are not actively doing the wrong thing
# against viewonly=True, so it is not as urgent to have these
# raise an error.
util.warn(
"Setting %s on relationship() while also "
"setting viewonly=True does not make sense, as a "
"viewonly=True relationship does not perform persistence "
"operations. This configuration may raise an error "
"in a future release." % (k,)
)
def instrument_class(self, mapper: Mapper[Any]) -> None:
attributes.register_descriptor(
mapper.class_,
self.key,
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc,
)
class Comparator(util.MemoizedSlots, PropComparator[_PT]):
"""Produce boolean, comparison, and other operators for
:class:`.RelationshipProperty` attributes.
See the documentation for :class:`.PropComparator` for a brief
overview of ORM level operator definition.
.. seealso::
:class:`.PropComparator`
:class:`.ColumnProperty.Comparator`
:class:`.ColumnOperators`
:ref:`types_operators`
:attr:`.TypeEngine.comparator_factory`
"""
__slots__ = (
"entity",
"mapper",
"property",
"_of_type",
"_extra_criteria",
)
prop: RODescriptorReference[RelationshipProperty[_PT]]
_of_type: Optional[_EntityType[_PT]]
def __init__(
self,
prop: RelationshipProperty[_PT],
parentmapper: _InternalEntityType[Any],
adapt_to_entity: Optional[AliasedInsp[Any]] = None,
of_type: Optional[_EntityType[_PT]] = None,
extra_criteria: Tuple[ColumnElement[bool], ...] = (),
):
"""Construction of :class:`.RelationshipProperty.Comparator`
is internal to the ORM's attribute mechanics.
"""
self.prop = prop
self._parententity = parentmapper
self._adapt_to_entity = adapt_to_entity
if of_type:
self._of_type = of_type
else:
self._of_type = None
self._extra_criteria = extra_criteria
def adapt_to_entity(
self, adapt_to_entity: AliasedInsp[Any]
) -> RelationshipProperty.Comparator[Any]:
return self.__class__(
self.prop,
self._parententity,
adapt_to_entity=adapt_to_entity,
of_type=self._of_type,
)
entity: _InternalEntityType[_PT]
"""The target entity referred to by this
:class:`.RelationshipProperty.Comparator`.
This is either a :class:`_orm.Mapper` or :class:`.AliasedInsp`
object.
This is the "target" or "remote" side of the
:func:`_orm.relationship`.
"""
mapper: Mapper[_PT]
"""The target :class:`_orm.Mapper` referred to by this
:class:`.RelationshipProperty.Comparator`.
This is the "target" or "remote" side of the
:func:`_orm.relationship`.
"""
def _memoized_attr_entity(self) -> _InternalEntityType[_PT]:
if self._of_type:
return inspect(self._of_type) # type: ignore
else:
return self.prop.entity
def _memoized_attr_mapper(self) -> Mapper[_PT]:
return self.entity.mapper
def _source_selectable(self) -> FromClause:
if self._adapt_to_entity:
return self._adapt_to_entity.selectable
else:
return self.property.parent._with_polymorphic_selectable
def __clause_element__(self) -> ColumnElement[bool]:
adapt_from = self._source_selectable()
if self._of_type:
of_type_entity = inspect(self._of_type)
else:
of_type_entity = None
(
pj,
sj,
source,
dest,
secondary,
target_adapter,
) = self.prop._create_joins(
source_selectable=adapt_from,
source_polymorphic=True,
of_type_entity=of_type_entity,
alias_secondary=True,
extra_criteria=self._extra_criteria,
)
if sj is not None:
return pj & sj
else:
return pj
def of_type(self, class_: _EntityType[Any]) -> PropComparator[_PT]:
r"""Redefine this object in terms of a polymorphic subclass.
See :meth:`.PropComparator.of_type` for an example.
"""
return RelationshipProperty.Comparator(
self.prop,
self._parententity,
adapt_to_entity=self._adapt_to_entity,
of_type=class_,
extra_criteria=self._extra_criteria,
)
def and_(
self, *criteria: _ColumnExpressionArgument[bool]
) -> PropComparator[Any]:
"""Add AND criteria.
See :meth:`.PropComparator.and_` for an example.
.. versionadded:: 1.4
"""
exprs = tuple(
coercions.expect(roles.WhereHavingRole, clause)
for clause in util.coerce_generator_arg(criteria)
)
return RelationshipProperty.Comparator(
self.prop,
self._parententity,
adapt_to_entity=self._adapt_to_entity,
of_type=self._of_type,
extra_criteria=self._extra_criteria + exprs,
)
def in_(self, other: Any) -> NoReturn:
"""Produce an IN clause - this is not implemented
for :func:`_orm.relationship`-based attributes at this time.
"""
raise NotImplementedError(
"in_() not yet supported for "
"relationships. For a simple "
"many-to-one, use in_() against "
"the set of foreign key values."
)
# https://github.com/python/mypy/issues/4266
__hash__ = None # type: ignore
def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
"""Implement the ``==`` operator.
In a many-to-one context, such as::
MyClass.some_prop == <some object>
this will typically produce a
clause such as::
mytable.related_id == <some id>
Where ``<some id>`` is the primary key of the given
object.
The ``==`` operator provides partial functionality for non-
many-to-one comparisons:
* Comparisons against collections are not supported.
Use :meth:`~.Relationship.Comparator.contains`.
* Compared to a scalar one-to-many, will produce a
clause that compares the target columns in the parent to
the given target.
* Compared to a scalar many-to-many, an alias
of the association table will be rendered as
well, forming a natural join that is part of the
main body of the query. This will not work for
queries that go beyond simple AND conjunctions of
comparisons, such as those which use OR. Use
explicit joins, outerjoins, or
:meth:`~.Relationship.Comparator.has` for
more comprehensive non-many-to-one scalar
membership tests.
* Comparisons against ``None`` given in a one-to-many
or many-to-many context produce a NOT EXISTS clause.
"""
if other is None or isinstance(other, expression.Null):
if self.property.direction in [ONETOMANY, MANYTOMANY]:
return ~self._criterion_exists()
else:
return _orm_annotate(
self.property._optimized_compare(
None, adapt_source=self.adapter
)
)
elif self.property.uselist:
raise sa_exc.InvalidRequestError(
"Can't compare a collection to an object or collection; "
"use contains() to test for membership."
)
else:
return _orm_annotate(
self.property._optimized_compare(
other, adapt_source=self.adapter
)
)
def _criterion_exists(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> Exists:
where_criteria = (
coercions.expect(roles.WhereHavingRole, criterion)
if criterion is not None
else None
)
if getattr(self, "_of_type", None):
info: Optional[_InternalEntityType[Any]] = inspect(
self._of_type
)
assert info is not None
target_mapper, to_selectable, is_aliased_class = (
info.mapper,
info.selectable,
info.is_aliased_class,
)
if self.property._is_self_referential and not is_aliased_class:
to_selectable = to_selectable._anonymous_fromclause()
single_crit = target_mapper._single_table_criterion
if single_crit is not None:
if where_criteria is not None:
where_criteria = single_crit & where_criteria
else:
where_criteria = single_crit
else:
is_aliased_class = False
to_selectable = None
if self.adapter:
source_selectable = self._source_selectable()
else:
source_selectable = None
(
pj,
sj,
source,
dest,
secondary,
target_adapter,
) = self.property._create_joins(
dest_selectable=to_selectable,
source_selectable=source_selectable,
)
for k in kwargs:
crit = getattr(self.property.mapper.class_, k) == kwargs[k]
if where_criteria is None:
where_criteria = crit
else:
where_criteria = where_criteria & crit
# annotate the *local* side of the join condition, in the case
# of pj + sj this is the full primaryjoin, in the case of just
# pj its the local side of the primaryjoin.
if sj is not None:
j = _orm_annotate(pj) & sj
else:
j = _orm_annotate(pj, exclude=self.property.remote_side)
if (
where_criteria is not None
and target_adapter
and not is_aliased_class
):
# limit this adapter to annotated only?
where_criteria = target_adapter.traverse(where_criteria)
# only have the "joined left side" of what we
# return be subject to Query adaption. The right
# side of it is used for an exists() subquery and
# should not correlate or otherwise reach out
# to anything in the enclosing query.
if where_criteria is not None:
where_criteria = where_criteria._annotate(
{"no_replacement_traverse": True}
)
crit = j & sql.True_._ifnone(where_criteria)
if secondary is not None:
ex = (
sql.exists(1)
.where(crit)
.select_from(dest, secondary)
.correlate_except(dest, secondary)
)
else:
ex = (
sql.exists(1)
.where(crit)
.select_from(dest)
.correlate_except(dest)
)
return ex
def any(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> ColumnElement[bool]:
"""Produce an expression that tests a collection against
particular criterion, using EXISTS.
An expression like::
session.query(MyClass).filter(
MyClass.somereference.any(SomeRelated.x==2)
)
Will produce a query like::
SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
AND related.x=2)
Because :meth:`~.Relationship.Comparator.any` uses
a correlated subquery, its performance is not nearly as
good when compared against large target tables as that of
using a join.
:meth:`~.Relationship.Comparator.any` is particularly
useful for testing for empty collections::
session.query(MyClass).filter(
~MyClass.somereference.any()
)
will produce::
SELECT * FROM my_table WHERE
NOT (EXISTS (SELECT 1 FROM related WHERE
related.my_id=my_table.id))
:meth:`~.Relationship.Comparator.any` is only
valid for collections, i.e. a :func:`_orm.relationship`
that has ``uselist=True``. For scalar references,
use :meth:`~.Relationship.Comparator.has`.
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
"'any()' not implemented for scalar "
"attributes. Use has()."
)
return self._criterion_exists(criterion, **kwargs)
def has(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> ColumnElement[bool]:
"""Produce an expression that tests a scalar reference against
particular criterion, using EXISTS.
An expression like::
session.query(MyClass).filter(
MyClass.somereference.has(SomeRelated.x==2)
)
Will produce a query like::
SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE
related.id==my_table.related_id AND related.x=2)
Because :meth:`~.Relationship.Comparator.has` uses
a correlated subquery, its performance is not nearly as
good when compared against large target tables as that of
using a join.
:meth:`~.Relationship.Comparator.has` is only
valid for scalar references, i.e. a :func:`_orm.relationship`
that has ``uselist=False``. For collection references,
use :meth:`~.Relationship.Comparator.any`.
"""
if self.property.uselist:
raise sa_exc.InvalidRequestError(
"'has()' not implemented for collections. " "Use any()."
)
return self._criterion_exists(criterion, **kwargs)
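# Illustrative (added note): for hypothetical User/Address mappings,
#
#     session.query(User).filter(User.addresses.any(Address.email == 'x'))
#
# tests a collection via a correlated EXISTS, while
#
#     session.query(Address).filter(Address.user.has(User.name == 'ed'))
#
# tests a scalar (many-to-one) reference; as the checks above show, each
# raises InvalidRequestError if used on the wrong relationship cardinality.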
def contains(
self, other: _ColumnExpressionArgument[Any], **kwargs: Any
) -> ColumnElement[bool]:
"""Return a simple expression that tests a collection for
containment of a particular item.
:meth:`~.Relationship.Comparator.contains` is
only valid for a collection, i.e. a
:func:`_orm.relationship` that implements
one-to-many or many-to-many with ``uselist=True``.
When used in a simple one-to-many context, an
expression like::
MyClass.contains(other)
Produces a clause like::
mytable.id == <some id>
Where ``<some id>`` is the value of the foreign key
attribute on ``other`` which refers to the primary
key of its parent object. From this it follows that
:meth:`~.Relationship.Comparator.contains` is
very useful when used with simple one-to-many
operations.
For many-to-many operations, the behavior of
:meth:`~.Relationship.Comparator.contains`
has more caveats. The association table will be
rendered in the statement, producing an "implicit"
join, that is, includes multiple tables in the FROM
clause which are equated in the WHERE clause::
query(MyClass).filter(MyClass.contains(other))
Produces a query like::
SELECT * FROM my_table, my_association_table AS
my_association_table_1 WHERE
my_table.id = my_association_table_1.parent_id
AND my_association_table_1.child_id = <some id>
Where ``<some id>`` would be the primary key of
``other``. From the above, it is clear that
:meth:`~.Relationship.Comparator.contains`
will **not** work with many-to-many collections when
used in queries that move beyond simple AND
conjunctions, such as multiple
:meth:`~.Relationship.Comparator.contains`
expressions joined by OR. In such cases subqueries or
explicit "outer joins" will need to be used instead.
See :meth:`~.Relationship.Comparator.any` for
a less-performant alternative using EXISTS, or refer
to :meth:`_query.Query.outerjoin`
as well as :ref:`orm_queryguide_joins`
for more details on constructing outer joins.
kwargs may be ignored by this operator but are required for API
conformance.
"""
if not self.prop.uselist:
raise sa_exc.InvalidRequestError(
"'contains' not implemented for scalar "
"attributes. Use =="
)
clause = self.prop._optimized_compare(
other, adapt_source=self.adapter
)
if self.prop.secondaryjoin is not None:
clause.negation_clause = self.__negated_contains_or_equals(
other
)
return clause
def __negated_contains_or_equals(
self, other: Any
) -> ColumnElement[bool]:
if self.prop.direction == MANYTOONE:
state = attributes.instance_state(other)
def state_bindparam(
local_col: ColumnElement[Any],
state: InstanceState[Any],
remote_col: ColumnElement[Any],
) -> BindParameter[Any]:
dict_ = state.dict
return sql.bindparam(
local_col.key,
type_=local_col.type,
unique=True,
callable_=self.prop._get_attr_w_warn_on_none(
self.prop.mapper, state, dict_, remote_col
),
)
def adapt(col: _CE) -> _CE:
if self.adapter:
return self.adapter(col)
else:
return col
if self.property._use_get:
return sql.and_(
*[
sql.or_(
adapt(x)
!= state_bindparam(adapt(x), state, y),
adapt(x) == None,
)
for (x, y) in self.property.local_remote_pairs
]
)
criterion = sql.and_(
*[
x == y
for (x, y) in zip(
self.property.mapper.primary_key,
self.property.mapper.primary_key_from_instance(other),
)
]
)
return ~self._criterion_exists(criterion)
def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
"""Implement the ``!=`` operator.
In a many-to-one context, such as::
MyClass.some_prop != <some object>
This will typically produce a clause such as::
mytable.related_id != <some id>
Where ``<some id>`` is the primary key of the
given object.
The ``!=`` operator provides partial functionality for non-
many-to-one comparisons:
* Comparisons against collections are not supported.
Use
:meth:`~.Relationship.Comparator.contains`
in conjunction with :func:`_expression.not_`.
* Compared to a scalar one-to-many, will produce a
clause that compares the target columns in the parent to
the given target.
* Compared to a scalar many-to-many, an alias
of the association table will be rendered as
well, forming a natural join that is part of the
main body of the query. This will not work for
queries that go beyond simple AND conjunctions of
comparisons, such as those which use OR. Use
explicit joins, outerjoins, or
:meth:`~.Relationship.Comparator.has` in
conjunction with :func:`_expression.not_` for
more comprehensive non-many-to-one scalar
membership tests.
* Comparisons against ``None`` given in a one-to-many
or many-to-many context produce an EXISTS clause.
"""
if other is None or isinstance(other, expression.Null):
if self.property.direction == MANYTOONE:
return _orm_annotate(
~self.property._optimized_compare(
None, adapt_source=self.adapter
)
)
else:
return self._criterion_exists()
elif self.property.uselist:
raise sa_exc.InvalidRequestError(
"Can't compare a collection"
" to an object or collection; use "
"contains() to test for membership."
)
else:
return _orm_annotate(self.__negated_contains_or_equals(other))
def _memoized_attr_property(self) -> RelationshipProperty[_PT]:
self.prop.parent._check_configure()
return self.prop
def _with_parent(
self,
instance: object,
alias_secondary: bool = True,
from_entity: Optional[_EntityType[Any]] = None,
) -> ColumnElement[bool]:
assert instance is not None
adapt_source: Optional[_CoreAdapterProto] = None
if from_entity is not None:
insp: Optional[_InternalEntityType[Any]] = inspect(from_entity)
assert insp is not None
if insp_is_aliased_class(insp):
adapt_source = insp._adapter.adapt_clause
return self._optimized_compare(
instance,
value_is_parent=True,
adapt_source=adapt_source,
alias_secondary=alias_secondary,
)
def _optimized_compare(
self,
state: Any,
value_is_parent: bool = False,
adapt_source: Optional[_CoreAdapterProto] = None,
alias_secondary: bool = True,
) -> ColumnElement[bool]:
if state is not None:
try:
state = inspect(state)
except sa_exc.NoInspectionAvailable:
state = None
if state is None or not getattr(state, "is_instance", False):
raise sa_exc.ArgumentError(
"Mapped instance expected for relationship "
"comparison to object. Classes, queries and other "
"SQL elements are not accepted in this context; for "
"comparison with a subquery, "
"use %s.has(**criteria)." % self
)
reverse_direction = not value_is_parent
if state is None:
return self._lazy_none_clause(
reverse_direction, adapt_source=adapt_source
)
if not reverse_direction:
criterion, bind_to_col = (
self._lazy_strategy._lazywhere,
self._lazy_strategy._bind_to_col,
)
else:
criterion, bind_to_col = (
self._lazy_strategy._rev_lazywhere,
self._lazy_strategy._rev_bind_to_col,
)
if reverse_direction:
mapper = self.mapper
else:
mapper = self.parent
dict_ = attributes.instance_dict(state.obj())
def visit_bindparam(bindparam: BindParameter[Any]) -> None:
if bindparam._identifying_key in bind_to_col:
bindparam.callable = self._get_attr_w_warn_on_none(
mapper,
state,
dict_,
bind_to_col[bindparam._identifying_key],
)
if self.secondary is not None and alias_secondary:
criterion = ClauseAdapter(
self.secondary._anonymous_fromclause()
).traverse(criterion)
criterion = visitors.cloned_traverse(
criterion, {}, {"bindparam": visit_bindparam}
)
if adapt_source:
criterion = adapt_source(criterion)
return criterion
def _get_attr_w_warn_on_none(
self,
mapper: Mapper[Any],
state: InstanceState[Any],
dict_: _InstanceDict,
column: ColumnElement[Any],
) -> Callable[[], Any]:
"""Create the callable that is used in a many-to-one expression.
E.g.::
u1 = s.query(User).get(5)
expr = Address.user == u1
Above, the SQL should be "address.user_id = 5". The callable
returned by this method produces the value "5" based on the identity
of ``u1``.
"""
# in this callable, we're trying to thread the needle through
# a wide variety of scenarios, including:
#
# * the object hasn't been flushed yet and there's no value for
# the attribute as of yet
#
# * the object hasn't been flushed yet but it has a user-defined
# value
#
# * the object has a value but it's expired and not locally present
#
# * the object has a value but it's expired and not locally present,
# and the object is also detached
#
# * The object hadn't been flushed yet, there was no value, but
# later, the object has been expired and detached, and *now*
# they're trying to evaluate it
#
# * the object had a value, but it was changed to a new value, and
# then expired
#
# * the object had a value, but it was changed to a new value, and
# then expired, then the object was detached
#
# * the object has a user-set value, but it's None and we don't do
# the comparison correctly for that so warn
#
prop = mapper.get_property_by_column(column)
# by invoking this method, InstanceState will track the last known
# value for this key each time the attribute is to be expired.
# this feature was added explicitly for use in this method.
state._track_last_known_value(prop.key)
lkv_fixed = state._last_known_values
def _go() -> Any:
assert lkv_fixed is not None
last_known = to_return = lkv_fixed[prop.key]
existing_is_available = (
last_known is not LoaderCallableStatus.NO_VALUE
)
# we support that the value may have changed. so here we
# try to get the most recent value including re-fetching.
# only if we can't get a value now due to detachment do we return
# the last known value
current_value = mapper._get_state_attr_by_column(
state,
dict_,
column,
passive=PassiveFlag.PASSIVE_OFF
if state.persistent
else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK,
)
if current_value is LoaderCallableStatus.NEVER_SET:
if not existing_is_available:
raise sa_exc.InvalidRequestError(
"Can't resolve value for column %s on object "
"%s; no value has been set for this column"
% (column, state_str(state))
)
elif current_value is LoaderCallableStatus.PASSIVE_NO_RESULT:
if not existing_is_available:
raise sa_exc.InvalidRequestError(
"Can't resolve value for column %s on object "
"%s; the object is detached and the value was "
"expired" % (column, state_str(state))
)
else:
to_return = current_value
if to_return is None:
util.warn(
"Got None for value of column %s; this is unsupported "
"for a relationship comparison and will not "
"currently produce an IS comparison "
"(but may in a future release)" % column
)
return to_return
return _go
def _lazy_none_clause(
self,
reverse_direction: bool = False,
adapt_source: Optional[_CoreAdapterProto] = None,
) -> ColumnElement[bool]:
if not reverse_direction:
criterion, bind_to_col = (
self._lazy_strategy._lazywhere,
self._lazy_strategy._bind_to_col,
)
else:
criterion, bind_to_col = (
self._lazy_strategy._rev_lazywhere,
self._lazy_strategy._rev_bind_to_col,
)
criterion = adapt_criterion_to_null(criterion, bind_to_col)
if adapt_source:
criterion = adapt_source(criterion)
return criterion
def __str__(self) -> str:
return str(self.parent.class_.__name__) + "." + self.key
def merge(
self,
session: Session,
source_state: InstanceState[Any],
source_dict: _InstanceDict,
dest_state: InstanceState[Any],
dest_dict: _InstanceDict,
load: bool,
_recursive: Dict[Any, object],
_resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
) -> None:
if load:
for r in self._reverse_property:
if (source_state, r) in _recursive:
return
if "merge" not in self._cascade:
return
if self.key not in source_dict:
return
if self.uselist:
impl = source_state.get_impl(self.key)
assert is_has_collection_adapter(impl)
instances_iterable = impl.get_collection(source_state, source_dict)
# if this is a CollectionAttributeImpl, then empty should
# be False, otherwise "self.key in source_dict" should not be
# True
assert not instances_iterable.empty if impl.collection else True
if load:
# for a full merge, pre-load the destination collection,
# so that individual _merge of each item pulls from identity
# map for those already present.
# also assumes CollectionAttributeImpl behavior of loading
# "old" list in any case
dest_state.get_impl(self.key).get(
dest_state, dest_dict, passive=PassiveFlag.PASSIVE_MERGE
)
dest_list = []
for current in instances_iterable:
current_state = attributes.instance_state(current)
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(
current_state,
current_dict,
load=load,
_recursive=_recursive,
_resolve_conflict_map=_resolve_conflict_map,
)
if obj is not None:
dest_list.append(obj)
if not load:
coll = attributes.init_state_collection(
dest_state, dest_dict, self.key
)
for c in dest_list:
coll.append_without_event(c)
else:
dest_impl = dest_state.get_impl(self.key)
assert is_has_collection_adapter(dest_impl)
dest_impl.set(
dest_state,
dest_dict,
dest_list,
_adapt=False
)
species(
label = 'C=C([CH]C)C(=C)[CH]C(24182)',
structure = SMILES('[CH2]C(=CC)C([CH2])=CC'),
E0 = (249.687,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.735277,'amu*angstrom^2'), symmetry=1, barrier=(16.9055,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0632434,'amu*angstrom^2'), symmetry=1, barrier=(29.514,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.737545,'amu*angstrom^2'), symmetry=1, barrier=(16.9576,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.732781,'amu*angstrom^2'), symmetry=1, barrier=(16.8481,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.739219,'amu*angstrom^2'), symmetry=1, barrier=(16.9961,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.384005,0.0840749,-5.09991e-05,5.50851e-09,4.14197e-12,30198.9,28.4131], Tmin=(100,'K'), Tmax=(1039.09,'K')), NASAPolynomial(coeffs=[18.1326,0.0354522,-1.35159e-05,2.44392e-09,-1.69358e-13,25127.7,-67.5143], Tmin=(1039.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(249.687,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'CH3CHCCH2(18175)',
structure = SMILES('C=C=CC'),
E0 = (145.615,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,540,610,2055,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.759584,'amu*angstrom^2'), symmetry=1, barrier=(17.4643,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2996.71,'J/mol'), sigma=(5.18551,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=468.08 K, Pc=48.77 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.74635,0.0218189,8.22353e-06,-2.14768e-08,8.55624e-12,17563.6,12.7381], Tmin=(100,'K'), Tmax=(1025.6,'K')), NASAPolynomial(coeffs=[6.82078,0.0192338,-7.45622e-06,1.36536e-09,-9.53195e-14,16028,-10.4333], Tmin=(1025.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(145.615,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(228.648,'J/(mol*K)'), label="""CH3CHCCH2""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C1([CH]C)CC1=CC(25275)',
structure = SMILES('[CH2]C1([CH]C)CC1=CC'),
E0 = (462.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.263258,0.0692237,-2.26363e-05,-1.35463e-08,8.13734e-12,55737.7,31.4039], Tmin=(100,'K'), Tmax=(1105.46,'K')), NASAPolynomial(coeffs=[15.171,0.0400578,-1.66801e-05,3.13624e-09,-2.2049e-13,50927.8,-48.8594], Tmin=(1105.46,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(462.221,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsCs) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(Methylene_cyclopropane) + radical(Neopentyl) + radical(Cs_S)"""),
)
species(
label = 'C=[C][CH]C(18176)',
structure = SMILES('[CH2][C]=CC'),
E0 = (361.056,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.352622,'amu*angstrom^2'), symmetry=1, barrier=(8.10748,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.828631,'amu*angstrom^2'), symmetry=1, barrier=(19.0519,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.42015,0.030446,-1.69076e-05,4.64684e-09,-5.12013e-13,43485.7,14.8304], Tmin=(100,'K'), Tmax=(2065.83,'K')), NASAPolynomial(coeffs=[10.7464,0.014324,-5.20136e-06,8.69079e-10,-5.48385e-14,40045.6,-31.3799], Tmin=(2065.83,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(361.056,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(274.378,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(=CC)C(C)=[C]C(25412)',
structure = SMILES('[CH2]C(=CC)C(C)=[C]C'),
E0 = (336.03,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,1685,370,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,222.04],'cm^-1')),
HinderedRotor(inertia=(0.395973,'amu*angstrom^2'), symmetry=1, barrier=(13.8694,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.396086,'amu*angstrom^2'), symmetry=1, barrier=(13.8683,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395737,'amu*angstrom^2'), symmetry=1, barrier=(13.8691,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395039,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395901,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.116365,0.0876489,-7.20737e-05,3.21805e-08,-5.96317e-12,40565.5,28.3373], Tmin=(100,'K'), Tmax=(1264.63,'K')), NASAPolynomial(coeffs=[14.5979,0.041109,-1.68732e-05,3.08148e-09,-2.10818e-13,36843.8,-46.1055], Tmin=(1264.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(336.03,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(=[C]C)C(C)=CC(25413)',
structure = SMILES('[CH2]C(=[C]C)C(C)=CC'),
E0 = (336.03,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,1685,370,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,222.04],'cm^-1')),
HinderedRotor(inertia=(0.395973,'amu*angstrom^2'), symmetry=1, barrier=(13.8694,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.396086,'amu*angstrom^2'), symmetry=1, barrier=(13.8683,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395737,'amu*angstrom^2'), symmetry=1, barrier=(13.8691,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395039,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395901,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.116365,0.0876489,-7.20737e-05,3.21805e-08,-5.96317e-12,40565.5,28.3373], Tmin=(100,'K'), Tmax=(1264.63,'K')), NASAPolynomial(coeffs=[14.5979,0.041109,-1.68732e-05,3.08148e-09,-2.10818e-13,36843.8,-46.1055], Tmin=(1264.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(336.03,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Cds_S)"""),
)
species(
label = '[CH2]C(=CC)[C](C)C=C(24605)',
structure = SMILES('[CH2]C=C(C)C([CH2])=CC'),
E0 = (216.244,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.712083,'amu*angstrom^2'), symmetry=1, barrier=(16.3722,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.555659,'amu*angstrom^2'), symmetry=1, barrier=(96.3851,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0202512,'amu*angstrom^2'), symmetry=1, barrier=(16.3711,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.712008,'amu*angstrom^2'), symmetry=1, barrier=(16.3705,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(4.19211,'amu*angstrom^2'), symmetry=1, barrier=(96.3849,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.0883175,0.0775021,-3.58132e-05,-7.55711e-09,8.27771e-12,26166.1,29.3215], Tmin=(100,'K'), Tmax=(1017.17,'K')), NASAPolynomial(coeffs=[16.4341,0.0376674,-1.41425e-05,2.53759e-09,-1.75328e-13,21504.4,-57.0638], Tmin=(1017.17,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(216.244,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(C=CC=CCJ)"""),
)
species(
label = '[CH2][C](C=C)C(C)=CC(24606)',
structure = SMILES('[CH2]C=C([CH2])C(C)=CC'),
E0 = (216.244,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.0883175,0.0775021,-3.58132e-05,-7.55711e-09,8.27771e-12,26166.1,29.3215], Tmin=(100,'K'), Tmax=(1017.17,'K')), NASAPolynomial(coeffs=[16.4341,0.0376674,-1.41425e-05,2.53759e-09,-1.75328e-13,21504.4,-57.0638], Tmin=(1017.17,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(216.244,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(C=CC=CCJ)"""),
)
species(
label = '[CH2]C(=CC)[C]1CC1C(25414)',
structure = SMILES('[CH2]C(=CC)[C]1CC1C'),
E0 = (289.9,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.71289,0.0520158,3.84829e-05,-8.55933e-08,3.61457e-11,35003.5,26.4903], Tmin=(100,'K'), Tmax=(968.714,'K')), NASAPolynomial(coeffs=[16.7686,0.0352996,-1.24057e-05,2.26286e-09,-1.62921e-13,29566.5,-62.466], Tmin=(968.714,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(289.9,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(Cyclopropane) + radical(Allyl_T) + radical(Allyl_P)"""),
)
species(
label = '[CH2][C]1C(=CC)CC1C(25415)',
structure = SMILES('[CH2]C1=C([CH]C)CC1C'),
E0 = (304.572,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.583091,0.0531885,4.0938e-05,-9.08388e-08,3.83549e-11,36774.2,26.4705], Tmin=(100,'K'), Tmax=(972.301,'K')), NASAPolynomial(coeffs=[18.2947,0.0339462,-1.21014e-05,2.24934e-09,-1.64353e-13,30795.4,-71.5147], Tmin=(972.301,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(304.572,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + ring(Cyclobutene) + radical(Allyl_P) + radical(Allyl_S)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = '[CH2]C(=C)C([CH2])=CC(25416)',
structure = SMILES('[CH2]C(=C)C([CH2])=CC'),
E0 = (285.713,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3010,987.5,1337.5,450,1655,311.383],'cm^-1')),
HinderedRotor(inertia=(0.327475,'amu*angstrom^2'), symmetry=1, barrier=(22.5291,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327466,'amu*angstrom^2'), symmetry=1, barrier=(22.5294,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327318,'amu*angstrom^2'), symmetry=1, barrier=(22.5272,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327483,'amu*angstrom^2'), symmetry=1, barrier=(22.5297,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1543,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.335271,0.0676667,-2.76626e-05,-1.62749e-08,1.21982e-11,34506.8,24.024], Tmin=(100,'K'), Tmax=(980.594,'K')), NASAPolynomial(coeffs=[17.5531,0.0266059,-9.47854e-06,1.70194e-09,-1.19937e-13,29727.4,-65.8563], Tmin=(980.594,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(285.713,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(390.78,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'C=C([CH]C)C[C]=CC(24184)',
structure = SMILES('[CH2]C(=CC)C[C]=CC'),
E0 = (366.985,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2995,3025,975,1000,1300,1375,400,500,1630,1680,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,1685,370,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180,579.702],'cm^-1')),
HinderedRotor(inertia=(0.147406,'amu*angstrom^2'), symmetry=1, barrier=(3.38916,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.64226,'amu*angstrom^2'), symmetry=1, barrier=(14.7668,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.64164,'amu*angstrom^2'), symmetry=1, barrier=(14.7526,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.643937,'amu*angstrom^2'), symmetry=1, barrier=(14.8054,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.145327,'amu*angstrom^2'), symmetry=1, barrier=(3.34136,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3683.66,'J/mol'), sigma=(6.4482,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=575.38 K, Pc=31.18 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.29648,0.0786067,-5.42868e-05,1.96375e-08,-2.97459e-12,44273.2,31.2372], Tmin=(100,'K'), Tmax=(1490.43,'K')), NASAPolynomial(coeffs=[13.9025,0.0420909,-1.75363e-05,3.199e-09,-2.17227e-13,40217.5,-39.8334], Tmin=(1490.43,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(366.985,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = 'CC=C1CCC1=CC(25269)',
structure = SMILES('CC=C1CCC1=CC'),
E0 = (114.107,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.677799,0.0585738,5.80411e-06,-4.1598e-08,1.78951e-11,13856,25.5085], Tmin=(100,'K'), Tmax=(1034.79,'K')), NASAPolynomial(coeffs=[13.4814,0.0415234,-1.65073e-05,3.07348e-09,-2.16896e-13,9469.28,-45.0922], Tmin=(1034.79,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(114.107,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(473.925,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + ring(12methylenecyclobutane)"""),
)
species(
label = 'CH2(19)',
structure = SMILES('[CH2]'),
E0 = (381.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = '[CH2]C([C]=CC)=CC(25417)',
structure = SMILES('[CH2]C([C]=CC)=CC'),
E0 = (334.774,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3100,440,815,1455,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.7606,'amu*angstrom^2'), symmetry=1, barrier=(17.4877,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.760854,'amu*angstrom^2'), symmetry=1, barrier=(17.4935,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.760586,'amu*angstrom^2'), symmetry=1, barrier=(17.4874,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.15146,'amu*angstrom^2'), symmetry=1, barrier=(49.4663,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1543,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.352604,0.0734369,-5.91187e-05,2.57941e-08,-4.60694e-12,40400.9,25.1788], Tmin=(100,'K'), Tmax=(1327.42,'K')), NASAPolynomial(coeffs=[14.2321,0.0316126,-1.18565e-05,2.05761e-09,-1.36512e-13,36716.1,-45.7131], Tmin=(1327.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(334.774,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(390.78,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + radical(C=CJC=C) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C1([CH]C)C(=C)C1C(25296)',
structure = SMILES('[CH2]C1([CH]C)C(=C)C1C'),
E0 = (466.494,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.29276,0.0655305,-4.50464e-06,-3.74661e-08,1.7759e-11,56253.7,30.0992], Tmin=(100,'K'), Tmax=(1027.4,'K')), NASAPolynomial(coeffs=[16.6435,0.0372633,-1.49065e-05,2.81296e-09,-2.01072e-13,51026,-58.316], Tmin=(1027.4,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(466.494,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsCs) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + ring(Methylene_cyclopropane) + radical(Neopentyl) + radical(Cs_S)"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = '[CH2]C(=CC)C(=C)C=C(24604)',
structure = SMILES('[CH2]C(=CC)C(=C)C=C'),
E0 = (242.677,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,181.962,683.313],'cm^-1')),
HinderedRotor(inertia=(0.669842,'amu*angstrom^2'), symmetry=1, barrier=(19.1337,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0582339,'amu*angstrom^2'), symmetry=1, barrier=(19.1767,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.83204,'amu*angstrom^2'), symmetry=1, barrier=(19.1302,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(4.52237,'amu*angstrom^2'), symmetry=1, barrier=(104.569,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (107.173,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.293043,0.0682771,-2.00337e-05,-2.05401e-08,1.21516e-11,29332.3,27.0261], Tmin=(100,'K'), Tmax=(1018.57,'K
#Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/paperDoll/SkinRaytracing.py
import trinity
import blue
import telemetry
import ctypes
import math
import time
import geo2
import struct
import itertools
import weakref
import uthread
import paperDoll as PD
import log
import random
mylog = log.Channel('optix', 'python')
def LogInfo(text, *args):
for arg in args:
text += ' ' + str(arg)
mylog.Log(text, log.LGINFO)
def LogWarn(text, *args):
for arg in args:
text = text + ' ' + str(arg)
mylog.Log(text, log.LGWARN)
class SkinRaytracingTools():
__guid__ = 'paperDoll.SkinRaytracingTools'
@staticmethod
def SetOptixMatrixFromTrinity(optix, matrixName, ratio = None):
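# Rebuild the camera's clip-to-world matrix from the current Trinity
# view/projection, hand it to OptiX under matrixName, and also export
# the raw view transform as 'viewTransform'.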
proj = trinity.TriProjection()
view = trinity.TriView()
view.transform = trinity.GetViewTransform()
proj.PerspectiveFov(trinity.GetFieldOfView(), trinity.GetAspectRatio() if ratio is None else ratio, trinity.GetFrontClip(), trinity.GetBackClip())
projToView = geo2.MatrixInverse(proj.transform)
viewToWorld = geo2.MatrixInverse(view.transform)
projToWorld = geo2.MatrixMultiply(projToView, viewToWorld)
r0 = projToWorld[0]
r1 = projToWorld[1]
r2 = projToWorld[2]
r3 = projToWorld[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4(matrixName, mat)
r0 = view.transform[0]
r1 = view.transform[1]
r2 = view.transform[2]
r3 = view.transform[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4('viewTransform', mat)
return mat
@staticmethod
def CreateSamplerForTexture(name, map, waitForFinish):
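# Blit the source texture into a BGRA8 render target once, then wrap
# the result in an OptiX texture sampler with normalized (0..1) indexing.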
rt = trinity.Tr2RenderTarget(map.width, map.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
job = trinity.CreateRenderJob()
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.SetStdRndStates(trinity.RM_FULLSCREEN)
job.RenderTexture(map)
job.PopDepthStencil()
job.PopRenderTarget()
job.ScheduleOnce()
if waitForFinish:
job.WaitForFinish()
sampler = trinity.Tr2OptixTextureSampler()
if True:
res = trinity.TriTextureRes()
res.CreateAndCopyFromRenderTarget(rt)
sampler.CreateFromTexture(res)
else:
sampler.CreateFromRenderTarget(rt)
sampler.SetNormalizedIndexingMode(True)
if True:
return (sampler, res)
else:
return (sampler, rt)
@staticmethod
def ConvertCubeToTextures(cube):
names = ['PX',
'NX',
'PY',
'NY',
'PZ',
'NZ']
viewVec = [(1, 0, 0),
(-1, 0, 0),
(0, 1, 0),
(0, -1, 0),
(0, 0, 1),
(0, 0, -1)]
upVec = [(0, 1, 0),
(0, 1, 0),
(0, 0, 1),
(0, 0, -1),
(0, 1, 0),
(0, 1, 0)]
spaceScene = trinity.EveSpaceScene()
spaceScene.envMap1ResPath = str(cube.resourcePath)
spaceScene.envMapScaling = (1, 1, -1)
spaceScene.backgroundRenderingEnabled = True
spaceScene.backgroundEffect = trinity.Load('res:/dx9/scene/starfield/bakeNebula.red')
blue.resMan.Wait()
node = PD.FindParameterByName(spaceScene.backgroundEffect, 'NebulaBrightness')
if node is None:
node = trinity.Tr2FloatParameter()
node.name = 'NebulaBrightness'
spaceScene.backgroundEffect.parameters.append(node)
if node is not None:
node.value = 100
node = PD.FindResourceByName(spaceScene.backgroundEffect, 'NebulaMap')
if node is None:
node = trinity.TriTexture2DParam()
node.name = 'NebulaMap'
spaceScene.backgroundEffect.resources.append(node)
node.SetResource(cube.resource)
blue.resMan.Wait()
mipmapped = []
useTexture = True
for i in xrange(len(names)):
name = names[i]
rt = PD.SkinLightmapRenderer.CreateRenderTarget(cube.resource.width, cube.resource.height, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM, useRT=True)
job = trinity.CreateRenderJob(name=name)
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.Clear([(1, 0, 0),
(0.2, 0, 0),
(0, 1, 0),
(0, 0.2, 0),
(0, 0, 1),
(0, 0, 0.2)][i], None)
proj = trinity.TriProjection()
proj.PerspectiveFov(math.pi * 0.5, 1, 0.1, 1000)
view = trinity.TriView()
view.SetLookAtPosition((0, 0, 0), viewVec[i], upVec[i])
viewport = trinity.TriViewport(0, 0, cube.resource.width, cube.resource.height, 0.0, 1.0)
job.SetView(view)
job.SetProjection(proj)
job.SetViewport(viewport)
job.Update(spaceScene)
job.RenderScene(spaceScene)
job.PopDepthStencil()
job.PopRenderTarget()
if useTexture:
tex = trinity.TriTextureRes(cube.resource.width, cube.resource.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
if True:
job.ScheduleOnce()
job.WaitForFinish()
if useTexture:
mipmapped.append(tex)
else:
mipmapped.append(rt)
else:
job.ScheduleRecurring()
return (mipmapped, names)
@staticmethod
def FindAllTextureResourcesFromEffect(effect, scope):
textures = {}
samplers = []
cubemaps = []
if effect is not None:
for r in effect.resources:
if type(r) == trinity.TriTexture2DParameter and r.resource is not None:
textures[r.name] = r.resource
elif type(r) == trinity.TriTextureCubeParameter and r.resource is not None:
if r.name in cubemaps:
continue
LogInfo('', r.name, ': Converting to individual textures')
cubemaps.append(r.name)
mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(r)
for i in range(len(names)):
if i < len(mipmaps):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(mipmaps[i])
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(r.name + names[i], sampler)
LogInfo('No-Copy Cube Side Interop for ' + r.name + names[i])
samplers.append(mipmaps[i])
samplers.append(sampler)
return (textures, samplers)
@staticmethod
def FindAllTextureResources(dynamic, scope):
textures = {}
samplers = []
cubemaps = []
def ProcessMesh(mesh):
for area in itertools.chain(mesh.opaqueAreas, mesh.decalAreas, mesh.transparentAreas):
newTextures, newSamplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(area.effect, scope)
textures.update(newTextures)
samplers.extend(newSamplers)
if type(dynamic) == trinity.Tr2IntSkinnedObject:
for mesh in dynamic.visualModel.meshes:
ProcessMesh(mesh)
elif type(dynamic) == trinity.EveShip2:
ProcessMesh(dynamic.highDetailMesh.object)
elif type(dynamic) == trinity.EveStation2:
ProcessMesh(dynamic.highDetailMesh.object)
return (textures, samplers)
@staticmethod
def InteropTexture(name, texture, waitForFinish, scope):
if texture.format == trinity.PIXEL_FORMAT.B8G8R8A8_UNORM:
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(texture)
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(name, sampler)
LogInfo('No-Copy Interop for', name)
return (sampler, None)
if texture.type == trinity.TRIRTYPE_CUBETEXTURE:
LogInfo('Copy-Interop for cubes not supported, skipping', name)
return
sampler_rt = SkinRaytracingTools.CreateSamplerForTexture(name, texture, waitForFinish)
if sampler_rt is None or len(sampler_rt) < 1:
LogInfo('InteropTexture failed for', name)
else:
scope.SetSampler(name, sampler_rt[0])
LogInfo('Interop for', name)
return sampler_rt
@staticmethod
def InteropAllTexturesFromEffect(optix, effect, waitForFinish, nameTranslation = None, scope = None, cache = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(effect, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
if cache is not None and texture in cache:
sampler = cache[texture]
scope.SetSampler(name, sampler[0])
LogInfo('Interop cache for', name)
else:
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler and cache is not None:
cache[texture] = sampler
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def InteropAllTextures(optix, dynamic, waitForFinish, nameTranslation = None, scope = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResources(dynamic, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def SafeLinearize(values):
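# Approximate sRGB-to-linear conversion (gamma 2.2) that preserves
# values above 1.0 by normalizing against the channel peak first.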
peak = max(1, max(values[0], max(values[1], values[2])))
return (peak * math.pow(values[0] / peak, 2.2),
peak * math.pow(values[1] / peak, 2.2),
peak * math.pow(values[2] / peak, 2.2),
values[3])
@staticmethod
def CopyParametersToContext(effect, instance, linearNames = None):
for p in effect.parameters:
if type(p) is trinity.Tr2Vector4Parameter:
value = SkinRaytracingTools.SafeLinearize(p.value) if linearNames is not None and p.name in linearNames else p.value
instance.SetFloat4(p.name, value[0], value[1], value[2], value[3])
elif type(p) is trinity.TriFloatParameter or type(p) is trinity.Tr2FloatParameter:
instance.SetFloat4(p.name, p.value, 0, 0, 0)
@staticmethod
def CreateBufferForLights(lights, leaveEmpty = False, preserveAlpha = False):
bufEveLights = trinity.Tr2OptixBuffer()
bufEveLights.CreateUserData(64, len(lights), trinity.OPTIX_BUFFER_OUTPUT, False)
bufEveLights.MapUser()
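# Each light is packed as 16 floats (64 bytes): position + radius,
# linearized color + falloff (or raw alpha), cone direction + outer
# cone angle, inner cone angle, and three floats of padding.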
buffer = ''
if leaveEmpty:
lights = []
for light in lights:
innerAngle = light.coneAlphaInner
outerAngle = light.coneAlphaOuter
if innerAngle + 1.0 > outerAngle:
innerAngle = outerAngle - 1.0
innerAngle = math.cos(innerAngle * 3.1415927 / 180.0)
outerAngle = math.cos(outerAngle * 3.1415927 / 180.0)
coneDir = geo2.Vec3Normalize((light.coneDirection[0], light.coneDirection[1], light.coneDirection[2]))
buffer += struct.pack('16f', light.position[0], light.position[1], light.position[2], light.radius, math.pow(light.color[0], 2.2), math.pow(light.color[1], 2.2), math.pow(light.color[2], 2.2), light.falloff if not preserveAlpha else light.color[3], coneDir[0], coneDir[1], coneDir[2], outerAngle, innerAngle, 0, 0, 0)
bufEveLights.SetUserDataFromStruct(buffer)
bufEveLights.UnmapUser()
return bufEveLights
@staticmethod
def CreateUInt1Buffer(optix, name):
buffer = trinity.Tr2OptixBuffer()
buffer.CreateUInt1(1, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
buffer.Map()
buffer.SetUserDataI(0, 0)
buffer.Unmap()
optix.SetBuffer(name, buffer)
return buffer
@staticmethod
def matEqual(m1, m2):
return m1._11 == m2._11 and m1._12 == m2._12 and m1._13 == m2._13 and m1._14 == m2._14 and m1._21 == m2._21 and m1._22 == m2._22 and m1._23 == m2._23 and m1._24 == m2._24 and m1._31 == m2._31 and m1._32 == m2._32 and m1._33 == m2._33 and m1._34 == m2._34 and m1._41 == m2._41 and m1._42 == m2._42 and m1._43 == m2._43 and m1._44 == m2._44
@staticmethod
def FuncWrapper(weakSelf, func):
if weakSelf():
func(weakSelf())
class OitHelper():
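# Owns the order-independent-transparency buffers: a one-slot counter
# ('oit_allocator') plus a fixed pool of per-fragment records ('oit_pool').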
def __init__(self, optix):
self.oitAllocatorBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'oit_allocator')
oitPoolBuffer = trinity.Tr2OptixBuffer()
oitPoolBuffer.CreateUserData(64 + 112, 1048576, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('oit_pool', oitPoolBuffer)
self.oitPoolBuffer = oitPoolBuffer
def ResetAllocationCount(self):
self.oitAllocatorBuffer.Map()
self.oitAllocatorBuffer.SetUserDataI(0, 0)
self.oitAllocatorBuffer.Unmap()
def GetAllocationCount(self):
self.oitAllocatorBuffer.Map()
count = self.oitAllocatorBuffer.GetUserDataI(0)
self.oitAllocatorBuffer.Unmap()
return count
class RayCountHelper():
def __init__(self, optix):
self.rayCountBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'ray_count')
def ResetCount(self):
self.rayCountBuffer.Map()
self.rayCountBuffer.SetUserDataI(0, 0)
self.rayCountBuffer.Unmap()
def GetCount(self):
self.rayCountBuffer.Map()
count = self.rayCountBuffer.GetUserDataI(0)
self.rayCountBuffer.Unmap()
return count
class CaptureHelper():
def __init__(self, width, height):
self.capture = trinity.Tr2RenderTarget(width, height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
def SaveSurfaceToFile(self, filename):
trinity.SaveRenderTarget(filename, self.capture)
LogInfo('Saved to', filename)
def CreateRenderSteps(self, rj, blitfx):
rj.PushRenderTarget(self.capture).name = 'Begin screenshot capture'
rj.PushDepthStencil(None).name = ' push depth'
rj.RenderEffect(blitfx).name = ' Blit to screenshot'
rj.PopDepthStencil().name = ' pop depth'
rj.PopRenderTarget().name = 'End screenshot capture'
class FullScreenBlitter():
def __init__(self, width, height):
self.effect = trinity.Tr2Effect()
self.effect.effectFilePath = 'res:/graphics/effect/optix/shaders/gammaBlit.fx'
if self.effect.effectResource is None:
LogWarn('Failed to load effect 1')
return
self.highpassEffect = trinity.Tr2Effect()
self.highpassEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/highpassFilter.fx'
if self.highpassEffect.effectResource is None:
LogWarn('Failed to load effect 2')
return
self.gaussianHorizEffect = trinity.Tr2Effect()
self.gaussianHorizEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianHorizEffect.effectResource is None:
LogWarn('Failed to load effect 3')
return
self.gaussianVertEffect = trinity.Tr2Effect()
self.gaussianVertEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianVertEffect.effectResource is None:
LogWarn('Failed to load effect 4')
return
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
while effect.effectResource.isLoading:
PD.Yield()
self.blitcolor = trinity.Tr2Vector4Parameter()
self.blitcolor.name = 'Color'
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
effect.PopulateParameters()
effect.RebuildCachedData()
effect.parameters.append(self.blitcolor)
sizesParam = trinity.Tr2Vector4Parameter()
sizesParam.name = 'InvSize'
sizesParam.value = (1.0 / width,
1.0 / height,
0,
0)
for effect in [self.effect, self.highpassEffect]:
effect.parameters.append(sizesParam)
sizesHorizParam = trinity.Tr2Vector4Parameter()
sizesHorizParam.name = 'invTexelSize'
sizesHorizParam.value = (1.0 / width,
0.0,
0,
0)
self.gaussianHorizEffect.parameters.append(sizesHorizParam)
sizesVertParam = trinity.Tr2Vector4Parameter()
sizesVertParam.name = 'invTexelSize'
sizesVertParam.value = (0.0,
1.0 / height,
0,
0)
self.gaussianVertEffect.parameters.append(sizesVertParam)
def SetTexture(self, optixOutputTexture, highpassTexture, filteredTexture):
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(optixOutputTexture)
for effect in [self.effect, self.highpassEffect]:
effect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(highpassTexture)
self.gaussianHorizEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(filteredTexture)
self.gaussianVertEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'BloomTexture'
tex.SetResource(highpassTexture)
self.effect.resources.append(tex)
def UpdateFrameCount(self, framecount):
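# The raytracer accumulates radiance across frames; blitting with
# color 1/framecount displays the running average.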
invFC = 1.0 / framecount if framecount > 0 else 1.0
self.blitcolor.value = (invFC,
invFC,
invFC,
invFC)
class FullOptixRenderer():
__guid__ = 'paperDoll.FullOptixRenderer'
instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
def GetFrameCount(self):
return self.framecount
def SaveScreenshot(self, filename):
self.capture.SaveSurfaceToFile(filename)
def AddRenderPreviewStep(self, renderJob):
renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
renderJob.PushDepthStencil(None).name = ' [optix] push depth'
renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
renderJob.PopDepthStencil().name = ' [optix] pop depth'
def RefreshMatrices(self):
model = self.skinnedObject
self.optix.RefreshMatrices(model, self.skinnedOptix)
self.RunSkinningAndTesselation()
self.ApplySettings()
print 'Refreshed'
@staticmethod
def RaytraceFrame(selfRef):
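# Progressive rendering: restart accumulation whenever the camera
# (clip-to-world) matrix changes, otherwise keep adding one sample
# per pixel per frame.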
start = time.time()
VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
selfRef.previousVP = VP
selfRef.outputBuffer.Clear()
selfRef.framecount = 0
model = selfRef.skinnedObject
pos1 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballLeft'))
pos2 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballRight'))
dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
autodof = min(dist1, dist2)
dof = selfRef.settings.get('lens_focal_distance', autodof)
print 'Auto-depth-of-field is at', autodof, ', actual focal distance is', dof
selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
else:
selfRef.framecount += 1
selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
selfRef.oit.ResetAllocationCount()
selfRef.rayCounter.ResetCount()
time1 = time.time()
selfRef.optix.Run(0, selfRef.width, selfRef.height)
time2 = time.time()
sec = time2 - time1
raycount = selfRef.rayCounter.GetCount()
raysec = 0
if sec > 0:
raysec = raycount / float(sec)
time3 = time.time()
if selfRef.framecount % 32 == 0:
stop = time.time()
print selfRef.oit.GetAllocationCount(), 'oit allocations'
selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
print 'time %05.3f / %05.3f / %05.3f / %05.3f msec' % (float(time1 - start) * 1000,
float(time2 - time1) * 1000,
float(time3 - time2) * 1000,
float(stop - time3) * 1000),
print '%d rays in %05.3f ms / %10d Krays/sec / %d rays per pixel' % (raycount,
sec * 1000,
raysec / 1000,
selfRef.framecount)
@telemetry.ZONE_METHOD
def OnBeforeOptixPositionsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxPosWorldUVEffect', self.oxMeshes)
if self.skinnedObject is not None and self.skinnedObject.visualModel is not None:
self.savedMeshes = self.skinnedObject.visualModel.meshes[:]
filteredMeshes = [ ref.object for ref in self.oxMeshes.iterkeys() if ref.object is not None ]
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, filteredMeshes)
self.scene.filterList.removeAt(-1)
self.scene.filterList.append(self.skinnedObject)
self.scene.useFilterList = True
@telemetry.ZONE_METHOD
def OnBeforeOptixNormalsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', self.oxMeshes)
def OnAfterOptix(self):
PD.SkinLightmapRenderer.DoRestoreShaders(meshes=self.oxMeshes)
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, self.savedMeshes)
del self.savedMeshes
self.scene.useFilterList = False
self.scene.filterList.removeAt(-1)
def _InitUVUnwrap(self):
self.oxMeshes = {}
self.scatterFX = set()
self.unwrapSize = 1024
posUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_POSWORLD_UV_EFFECT)
normalUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_NORMALWORLD_UV_EFFECT)
deriv = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.STRETCHMAP_RENDERER_EFFECT)
self.oxDepth = trinity.Tr2DepthStencil(self.unwrapSize, self.unwrapSize, trinity.DEPTH_STENCIL_FORMAT.D24S8, 1, 0)
for mesh in self.skinnedObject.visualModel.meshes:
if PD.SkinLightmapRenderer.IsScattering(mesh):
m = PD.SkinLightmapRenderer.Mesh()
m.ExtractOrigEffect(mesh)
m.CreateOptixEffects(includeStretchMap=True)
PD.AddWeakBlue(self, 'oxMeshes', mesh, m)
fx = PD.GetEffectsFromMesh(mesh)
for f in fx:
self.scatterFX.add(f)
self.oxWorldPosMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.oxWorldNormalMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.stretchMap = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize / 2, self.unwrapSize / 2, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
rj = trinity.CreateRenderJob('Optix UV Unwrap')
rj.PushRenderTarget(self.oxWorldPosMapUV)
rj.PushDepthStencil(self.oxDepth)
rj.Clear((0, 0, 0, 0), 1.0)
rj.SetStdRndStates(trinity.RM_FULLSCREEN)
vp = trinity.TriViewport()
vp.x = 0
vp.y = 0
vp.width = self.unwrapSize
vp.height = self.unwrapSize
rj.SetViewport(vp)
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnBeforeOptixPositionsUV, 'onBeforeOptixPositionsUV', rj)
rj.RenderScene(self.scene).name = 'Optix WorldPos (UV space)'
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.SetRenderTarget(self.oxWorldNormalMapUV)
rj.Clear((0, 0, 0, 0), 1.0)
rj.RenderScene(self.scene).name = 'Optix Normals (UV space)'
rj.SetRenderTarget(self.stretchMap)
rj.Clear((0, 0, 0, 0), 1.0)
vp2 = trinity.TriViewport()
vp2.x = 0
vp2.y = 0
vp2.width = self.unwrapSize / 2
vp2.height = self.unwrapSize / 2
rj.SetViewport(vp2)
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('stretchmapRenderEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.RenderScene(self.scene).name = 'Stretchmap'
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnAfterOptix, 'onAfterOptix', rj)
rj.PopRenderTarget()
rj.PopDepthStencil()
rj.ScheduleOnce()
rj.WaitForFinish()
if False:
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldPosMapUV, 'c:/depot/oxworldposuv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldNormalMapUV, 'c:/depot/oxworldnormaluv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.stretchMap, 'c:/depot/stretchmap2.dds', isRT=True)
print '** MAPS SAVED **'
def RunSkinningAndTesselation(self):
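# Run the skinning kernels once per vertex stride (72- and 64-byte
# layouts); batches flagged for tesselation get a 4x larger output
# buffer, since each input triangle is subdivided into four.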
print '*** Tesselation phase ***'
batchTypes = self.skinnedOptix[0]
optix = self.optix
ptx = {}
ptx[72] = self.path + 'eve_skinning_kernel72.ptx'
ptx[64] = self.path + 'eve_skinning_kernel64.ptx'
for bytes, ptxfile in ptx.iteritems():
LogInfo('Processing ', bytes, 'bytes/vertex')
skinningProgram = trinity.Tr2OptixProgram(ptxfile, 'kernel_no_tesselation')
skinningProgramTesselate = trinity.Tr2OptixProgram(ptxfile, 'kernel_tesselation')
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, skinningProgram)
optix.SetRayGenerationProgram(1, skinningProgramTesselate)
for batchType in range(len(batchTypes)):
batches = batchTypes[batchType]
out = []
def needsTesselation(fx):
return 'skinnedavatarhair_detailed.fx' in fx.effectFilePath.lower()
for batch in batches:
if 'furshell' in batch[1].effectFilePath.lower():
out.append(None)
continue
tesselate = needsTesselation(batch[1])
triangle_count = batch[6]
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
out.append(None)
continue
vertex_buffer_output = trinity.Tr2OptixBuffer()
vertex_buffer_output.CreateUserData(bytes_per_vertex, triangle_count * 3 * 4 if tesselate else triangle_count * 3, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
out.append(vertex_buffer_output)
for i, batch in enumerate(batches):
if 'furshell' in batch[1].effectFilePath.lower():
continue
triangle_count = batch[6]
tesselate = needsTesselation(batch[1])
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
continue
if tesselate:
LogInfo('Tesselating geometry ', batch, 'of type ', batchType)
else:
LogInfo('Skinning geometry ', batch, 'of type ', batchType)
optix.SetBuffer('vertex_buffer', batch[2])
optix.SetBuffer('index_buffer', batch[3])
optix.SetBuffer('vertex_buffer_output', out[i])
optix.SetUInt('first_index_index', batch[5])
optix.SetBuffer('matrix_buffer', batch[7])
program = int(tesselate)
optix.Run(program, triangle_count, 1)
batch[0].SetBuffer('vertex_buffer', out[i])
if tesselate:
batch[0].SetPrimitiveCount(triangle_count * 4)
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetRayGenerationProgram(1, self.raygen)
def RemoveBadGeometry(self, model):
self.haveBeard = False
self.beardFx = None
for mesh in model.visualModel.meshes:
for area in mesh.decalAreas:
if PD.IsBeard(area):
self.haveBeard = True
self.beardFx = area.effect
area.debugIsHidden = True
break
for mesh in model.visualModel.meshes:
for area in mesh.transparentAreas:
lname = area.name.lower()
if lname.startswith('eyeshadow_'):
mesh.transparentAreas.removeAt(-1)
break
if False:
for mesh in model.visualModel.meshes:
for area in mesh.opaqueAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.opaqueAreas.removeAt(-1)
break
for area in mesh.transparentAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.transparentAreas.removeAt(-1)
break
if False:
print 'raytracing', len(model.visualModel.meshes), 'meshes'
for mesh in model.visualModel.meshes:
lname = mesh.name.lower()
if not lname.startswith('hair'):
print 'removing', lname
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
elif False:
print 'removing', lname
for a in mesh.opaqueAreas:
print 'opaque', a.name
for a in mesh.decalAreas:
print 'decal', a.name
for a in mesh.transparentAreas:
print 'transp', a.name
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
else:
print 'keeping', lname
def TransferBeardParameters(self, optix):
if self.haveBeard:
LogInfo('Beard found')
beardLength = self.settings['beardLength']
optix.SetFloat3('beardOptions', beardLength[0], beardLength[1], self.settings['beardGravity'])
floatMap = {'FurLength': 'beard_fur_length',
'UVScale': 'beard_uv_scale',
'AlphaMultiplier': 'beard_alpha_multiplier',
'CombStrength': 'beard_comb_strength',
'FurGrainRotation': 'beard_fur_grain_rotation',
'MirrorGrain': 'beard_mirror_grain',
'FurParallax': 'beard_fur_parallax'}
float3Map = {'gravityOffset': 'beard_gravity_offset',
'MaterialDiffuseColor': 'beard_diffuse_color'}
for param in self.beardFx.parameters:
optixName = floatMap.get(param.name, None)
if optixName is not None:
optix.SetFloat(optixName, param.value)
else:
optixName = float3Map.get(param.name, None)
if optixName is not None:
optix.SetFloat3(optixName, param.value[0], param.value[1], param.value[2])
def GenerateBeardGeometry(self, optix, path, any_hit_shadow):
if not self.haveBeard:
return None
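# Run a spline-generation kernel that writes 512x512 curve records
# (80 bytes each), then expose them to OptiX as a custom bezier-curve
# geometry with its own intersect/bounds programs.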
LogInfo('generating beard splines')
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld')
beardProgram = trinity.Tr2OptixProgram(path + 'eve_beard_kernel.ptx', 'kernel')
curveOutputBuffer = trinity.Tr2OptixBuffer()
curveCount = 512
curveOutputBuffer.CreateUserData(80, curveCount * curveCount, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('output', curveOutputBuffer)
rayTypeCount = optix.GetRayTypeCount()
optix.SetRayTypeCount(1)
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, beardProgram)
optix.SetRayGenerationProgram(1, beardProgram)
optix.SetEntryPointCount(1)
LogInfo('beard: about to Run')
optix.Run(0, curveCount, curveCount)
LogInfo('beard: Run done')
optix.SetRayTypeCount(rayTypeCount)
hairGeometry = trinity.Tr2OptixGeometry()
hairGeometry.InitializeFromProgram(path + 'bezier_curves.ptx', 'intersect', 'bounds')
subdivideDepth = 2
hairGeometry.SetPrimitiveCount(curveCount * curveCount * (1 << subdivideDepth))
optix.SetUInt('presubdivide_depth', subdivideDepth)
optix.SetBuffer('curves', curveOutputBuffer)
LogInfo('beard: geometry setup done')
beardInstance = trinity.Tr2OptixGeometryInstance()
beardInstance.SetGeometry(hairGeometry)
closest_hit_BeardShader = trinity.Tr2OptixProgram(path + 'eve_beard_shader.ptx', 'closest_hit_BeardShader')
beardMaterial = trinity.Tr2OptixMaterial()
beardMaterial.SetClosestHit(0, closest_hit_BeardShader)
beardMaterial.SetAnyHit(1, any_hit_shadow)
beardInstance.SetMaterial(beardMaterial)
LogInfo('beard: geometry instance setup done')
return beardInstance
def _DoInit(self, scene = None):
model = None
if scene is None:
scene = PD.SkinLightmapRenderer.Scene()
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
for dynamic in scene.dynamics:
if dynamic.__typename__ == 'Tr2IntSkinnedObject':
model = dynamic
break
else:
LogWarn('No Tr2IntSkinnedObject found')
return
if model is None:
LogWarn('No Tr2IntSkinnedObject found')
return
self.skinnedObject = model
if self.skinnedObject.visualModel is None:
LogWarn('skinnedObject has no visualMeshes')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
self.RemoveBadGeometry(model)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
self._InitUVUnwrap()
for steps in trinity.renderJobs.recurring:
if steps.name == 'FullOptixRenderer':
steps.UnscheduleRecurring()
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetUInt('radiance_ray_type', 0)
optix.SetUInt('shadow_ray_type', 1)
optix.SetUInt('translucency_ray_type', 2)
optix.SetUInt('translucency_ray_type', 3)
optix.SetFloat('scene_epsilon', 0.001)
optix.SetUInt('frameIteration', 0)
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/NCC/'))
self.path = path
LogInfo('Getting files from', path)
everything = []
any_hit_shadow = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow')
any_hit_shadow_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow_blend')
shader_diffuse_only_feeler = trinity.Tr2OptixProgram(path + 'eve_bounce.ptx', 'closest_hit_DiffuseOnlyFeeler2')
any_hit_cutout = trinity.Tr2OptixProgram(path + 'eve_cutout.ptx', 'any_hit_CutoutMask')
any_hit_diffuse_feeler_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_diffuse_feeler_blend')
everything.append(any_hit_shadow)
everything.append(any_hit_shadow_blend)
everything.append(shader_diffuse_only_feeler)
everything.append(any_hit_cutout)
mainRay = 0
shadowRay = 1
bounceRay = 3
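# Common wiring for materials: closest-hit shading for camera rays,
# any-hit occlusion for shadow rays, and a cheap diffuse 'feeler' for
# bounce rays; decals additionally get cutout/blend any-hit programs.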
def MakeMaterialWithShader(shader):
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(mainRay, shader)
material.SetAnyHit(shadowRay, any_hit_shadow)
material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(material)
return (material, shader)
def MakeMaterial(ptxFile, shaderName):
shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
everything.append(shader)
return MakeMaterialWithShader(shader)
def MakeDecal(material):
material.SetAnyHit(mainRay, any_hit_cutout)
material.SetAnyHit(shadowRay, any_hit_shadow_blend)
material.SetAnyHit(bounceRay, any_hit_cutout)
skin_single_material, skin_single_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single2')
skin_single_material_scatter = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single_Scatter2')[0]
skin_single_material_decal = MakeMaterialWithShader(skin_single_shade)[0]
MakeDecal(skin_single_material_decal)
glasses_shade = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shade')
glasses_shadow = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shadow')
glass_material = trinity.Tr2OptixMaterial()
glass_material.SetAnyHit(mainRay, glasses_shade)
glass_material.SetAnyHit(shadowRay, glasses_shadow)
glass_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(glasses_shade)
everything.append(glasses_shadow)
vizNames = ['closest_hit_VizNormal',
'closest_hit_VizUV',
'closest_hit_VizConstantColor',
'closest_hit_VizDiffuse']
vizualizer, vizualizer_shade = MakeMaterial('eve_basic', vizNames[0])
vizualizer_decal = MakeMaterialWithShader(vizualizer_shade)[0]
MakeDecal(vizualizer_decal)
skin_double_material, skin_double_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2')
skin_double_material_decal = MakeMaterialWithShader(skin_double_shade)[0]
MakeDecal(skin_double_material_decal)
skin_double_material_transparent = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2_Blend')[0]
skin_double_material_transparent.SetAnyHit(mainRay, any_hit_cutout)
skin_double_material_transparent.SetAnyHit(shadowRay, any_hit_shadow_blend)
skin_double_material_transparent.SetAnyHit(bounceRay, any_hit_cutout)
avatar_br
'''
# AWS::Chatbot Construct Library
AWS Chatbot is an AWS service that enables DevOps and software development teams to use Slack chat rooms to monitor and respond to operational events in their AWS Cloud. AWS Chatbot processes AWS service notifications from Amazon Simple Notification Service (Amazon SNS), and forwards them to Slack chat rooms so teams can analyze and act on them immediately, regardless of location.
This module is part of the [AWS Cloud Development Kit](https://github.com/aws/aws-cdk) project.
```python
import aws_cdk.aws_chatbot as chatbot
import aws_cdk.aws_sns as sns
import aws_cdk.aws_iam as iam
slack_channel = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
slack_channel_configuration_name="YOUR_CHANNEL_NAME",
slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
slack_channel_id="YOUR_SLACK_CHANNEL_ID"
)
slack_channel.add_to_role_policy(iam.PolicyStatement(
effect=iam.Effect.ALLOW,
    actions=["s3:GetObject"],
resources=["arn:aws:s3:::abc/xyz/123.txt"]
))
slack_channel.add_notification_topic(sns.Topic(self, "MyTopic"))
```
## Log Group
Slack channel configuration automatically creates a log group with the name `/aws/chatbot/<configuration-name>` in `us-east-1` upon first execution, with
log data set to never expire.
The `logRetention` property can be used to set a different expiration period. A log group will be created if one does not already exist.
If the log group already exists, its expiration will be configured to the value specified in this construct (never expire, by default).
By default, CDK uses the AWS SDK retry options when interacting with the log group. The `logRetentionRetryOptions` property
allows you to customize the maximum number of retries and base backoff duration.
*Note* that, if `logRetention` is set, a [CloudFormation custom
resource](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cfn-customresource.html) is added
to the stack that pre-creates the log group as part of the stack deployment, if it does not already exist, and sets the
correct log retention period (never expire, by default).
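A minimal sketch of overriding the retention period (reusing the placeholder channel values from the example above; `RetentionDays.ONE_MONTH` is just one of the available settings):

```python
import aws_cdk.aws_chatbot as chatbot
import aws_cdk.aws_logs as logs

slack_channel = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
    slack_channel_configuration_name="YOUR_CHANNEL_NAME",
    slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
    slack_channel_id="YOUR_SLACK_CHANNEL_ID",
    # keep the Chatbot log group for one month instead of never expiring
    log_retention=logs.RetentionDays.ONE_MONTH
)
```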
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import constructs
from .. import (
CfnResource as _CfnResource_9df397a6,
Duration as _Duration_4839e8c3,
IInspectable as _IInspectable_c2943556,
IResolvable as _IResolvable_da3f097b,
IResource as _IResource_c80c4260,
Resource as _Resource_45bc6135,
TreeInspector as _TreeInspector_488e0dd5,
)
from ..aws_cloudwatch import (
Metric as _Metric_e396a4dc,
MetricOptions as _MetricOptions_1788b62f,
Unit as _Unit_61bc6f70,
)
from ..aws_codestarnotifications import (
INotificationRuleTarget as _INotificationRuleTarget_faa3b79b,
NotificationRuleTargetConfig as _NotificationRuleTargetConfig_ea27e095,
)
from ..aws_iam import (
IGrantable as _IGrantable_71c4f5de,
IPrincipal as _IPrincipal_539bb2fd,
IRole as _IRole_235f5d8e,
PolicyStatement as _PolicyStatement_0fe33853,
)
from ..aws_logs import (
LogRetentionRetryOptions as _LogRetentionRetryOptions_62d80a14,
RetentionDays as _RetentionDays_070f99f0,
)
from ..aws_sns import ITopic as _ITopic_9eca4852
@jsii.implements(_IInspectable_c2943556)
class CfnSlackChannelConfiguration(
_CfnResource_9df397a6,
metaclass=jsii.JSIIMeta,
jsii_type="aws-cdk-lib.aws_chatbot.CfnSlackChannelConfiguration",
):
'''A CloudFormation ``AWS::Chatbot::SlackChannelConfiguration``.
The ``AWS::Chatbot::SlackChannelConfiguration`` resource configures a Slack channel to allow users to use AWS Chatbot with AWS CloudFormation templates.
This resource requires some setup to be done in the AWS Chatbot console. To provide the required Slack workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console, then copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide*.
:cloudformationResource: AWS::Chatbot::SlackChannelConfiguration
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html
:exampleMetadata: fixture=_generated
Example::
# The code below shows an example of how to instantiate this type.
# The values are placeholders you should change.
from aws_cdk import aws_chatbot as chatbot
cfn_slack_channel_configuration = chatbot.CfnSlackChannelConfiguration(self, "MyCfnSlackChannelConfiguration",
configuration_name="configurationName",
iam_role_arn="iamRoleArn",
slack_channel_id="slackChannelId",
slack_workspace_id="slackWorkspaceId",
# the properties below are optional
guardrail_policies=["guardrailPolicies"],
logging_level="loggingLevel",
sns_topic_arns=["snsTopicArns"],
user_role_required=False
)
'''
def __init__(
self,
scope: constructs.Construct,
id: builtins.str,
*,
configuration_name: builtins.str,
iam_role_arn: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
guardrail_policies: typing.Optional[typing.Sequence[builtins.str]] = None,
logging_level: typing.Optional[builtins.str] = None,
sns_topic_arns: typing.Optional[typing.Sequence[builtins.str]] = None,
user_role_required: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]] = None,
) -> None:
'''Create a new ``AWS::Chatbot::SlackChannelConfiguration``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param configuration_name: The name of the configuration.
        :param iam_role_arn: The ARN of the IAM role that defines the permissions for AWS Chatbot. This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_.
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ``.
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot. To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide*.
:param guardrail_policies: The list of IAM policy ARNs that are applied as channel guardrails. The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Logging levels include ``ERROR``, ``INFO``, or ``NONE``.
:param sns_topic_arns: The ARNs of the SNS topics that deliver notifications to AWS Chatbot.
:param user_role_required: Enables use of a user role requirement in your chat configuration.
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration.__init__)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
props = CfnSlackChannelConfigurationProps(
configuration_name=configuration_name,
iam_role_arn=iam_role_arn,
slack_channel_id=slack_channel_id,
slack_workspace_id=slack_workspace_id,
guardrail_policies=guardrail_policies,
logging_level=logging_level,
sns_topic_arns=sns_topic_arns,
user_role_required=user_role_required,
)
jsii.create(self.__class__, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: _TreeInspector_488e0dd5) -> None:
'''Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration.inspect)
check_type(argname="argument inspector", value=inspector, expected_type=type_hints["inspector"])
return typing.cast(None, jsii.invoke(self, "inspect", [inspector]))
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration._render_properties)
check_type(argname="argument props", value=props, expected_type=type_hints["props"])
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The CloudFormation resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> builtins.str:
'''
:cloudformationAttribute: Arn
'''
return typing.cast(builtins.str, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "cfnProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="configurationName")
def configuration_name(self) -> builtins.str:
'''The name of the configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-configurationname
'''
return typing.cast(builtins.str, jsii.get(self, "configurationName"))
@configuration_name.setter
def configuration_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "configuration_name").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "configurationName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="iamRoleArn")
def iam_role_arn(self) -> builtins.str:
'''The ARN of the IAM role that defines the permissions for AWS Chatbot.
        This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-iamrolearn
'''
return typing.cast(builtins.str, jsii.get(self, "iamRoleArn"))
@iam_role_arn.setter
def iam_role_arn(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "iam_role_arn").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "iamRoleArn", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelId")
def slack_channel_id(self) -> builtins.str:
'''The ID of the Slack channel.
To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ``.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackchannelid
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelId"))
@slack_channel_id.setter
def slack_channel_id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "slack_channel_id").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "slackChannelId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackWorkspaceId")
def slack_workspace_id(self) -> builtins.str:
'''The ID of the Slack workspace authorized with AWS Chatbot.
To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide*.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackworkspaceid
'''
return typing.cast(builtins.str, jsii.get(self, "slackWorkspaceId"))
@slack_workspace_id.setter
def slack_workspace_id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "slack_workspace_id").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "slackWorkspaceId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="guardrailPolicies")
def guardrail_policies(self) -> typing.Optional[typing.List[builtins.str]]:
'''The list of IAM policy ARNs that are applied as channel guardrails.
The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-guardrailpolicies
'''
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "guardrailPolicies"))
@guardrail_policies.setter
def guardrail_policies(
self,
value: typing.Optional[typing.List[builtins.str]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "guardrail_policies").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "guardrailPolicies", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="loggingLevel")
def logging_level(self) -> typing.Optional[builtins.str]:
'''Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs.
Logging levels include ``ERROR``, ``INFO``, or ``NONE``.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-logginglevel
'''
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingLevel"))
@logging_level.setter
def logging_level(self, value: typing.Optional[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "logging_level").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "loggingLevel", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="snsTopicArns")
def sns_topic_arns(self) -> typing.Optional[typing.List[builtins.str]]:
'''The ARNs of the SNS topics that deliver notifications to AWS Chatbot.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-snstopicarns
'''
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "snsTopicArns"))
@sns_topic_arns.setter
def sns_topic_arns(self, value: typing.Optional[typing.List[builtins.str]]) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "sns_topic_arns").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "snsTopicArns", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="userRoleRequired")
def user_role_required(
self,
) -> typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]]:
'''Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-userrolerequired
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]], jsii.get(self, "userRoleRequired"))
@user_role_required.setter
def user_role_required(
self,
value: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "user_role_required").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "userRoleRequired", value)
@jsii.data_type(
jsii_type="aws-cdk-lib.aws_chatbot.CfnSlackChannelConfigurationProps",
jsii_struct_bases=[],
name_mapping={
"configuration_name": "configurationName",
"iam_role_arn": "iamRoleArn",
"slack_channel_id": "slackChannelId",
"slack_workspace_id": "slackWorkspaceId",
"guardrail_policies": "guardrailPolicies",
"logging_level": "loggingLevel",
"sns_topic_arns": "snsTopicArns",
"user_role_required": "userRoleRequired",
},
)
class CfnSlackChannelConfigurationProps:
def __init__(
self,
*,
configuration_name: builtins.str,
iam_role_arn: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
guardrail_policies: typing.Optional[typing.Sequence[builtins.str]] = None,
logging_level: typing.Optional[builtins.str] = None,
sns_topic_arns: typing.Optional[typing.Sequence[builtins.str]] = None,
user_role_required: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]] = None,
) -> None:
'''Properties for defining a ``CfnSlackChannelConfiguration``.
:param configuration_name: The name of the configuration.
        :param iam_role_arn: The ARN of the IAM role that defines the permissions for AWS Chatbot. This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_.
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ``.
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot. To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide*.
:param guardrail_policies: The list of IAM policy ARNs that are applied as channel guardrails. The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Logging levels include ``ERROR``, ``INFO``, or ``NONE``.
:param sns_topic_arns: The ARNs of the SNS topics that deliver notifications to AWS Chatbot.
:param user_role_required: Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html
:exampleMetadata: fixture=_generated
Example::
# The code below shows an example of how to instantiate this type.
# The values are placeholders you should change.
from aws_cdk import aws_chatbot as chatbot
cfn_slack_channel_configuration_props = chatbot.CfnSlackChannelConfigurationProps(
configuration_name="configurationName",
iam_role_arn="iamRoleArn",
slack_channel_id="slackChannelId",
slack_workspace_id="slackWorkspaceId",
# the properties below are optional
guardrail_policies=["guardrailPolicies"],
logging_level="loggingLevel",
sns_topic_arns=["snsTopicArns"],
user_role_required=False
)
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfigurationProps.__init__)
check_type(argname="argument configuration_name", value=configuration_name, expected_type=type_hints["configuration_name"])
check_type(argname="argument iam_role_arn", value=iam_role_arn, expected_type=type_hints["iam_role_arn"])
check_type(argname="argument slack_channel_id", value=slack_channel_id, expected_type=type_hints["slack_channel_id"])
check_type(argname="argument slack_workspace_id", value=slack_workspace_id, expected_type=type_hints["slack_workspace_id"])
check_type(argname="argument guardrail_policies", value=guardrail_policies, expected_type=type_hints["guardrail_policies"])
check_type(argname="argument logging_level", value=logging_level, expected_type=type_hints["logging_level"])
check_type(argname="argument sns_topic_arns", value=sns_topic_arns, expected_type=type_hints["sns_topic_arns"])
check_type(argname="argument user_role_required", value=user_role_required, expected_type=type_hints["user_role_required"])
self._values: typing.Dict[str, typing.Any] = {
"configuration_name": configuration_name,
"iam_role_arn": iam_role_arn,
"slack_channel_id": slack_channel_id,
"slack_workspace_id": slack_workspace_id,
}
if guardrail_policies is not None:
self._values["guardrail_policies"] = guardrail_policies
if logging_level is not None:
self._values["logging_level"] = logging_level
if sns_topic_arns is not None:
self._values["sns_topic_arns"] = sns_topic_arns
if user_role_required is not None:
self._values["user_role_required"] = user_role_required
@builtins.property
def configuration_name(self) -> builtins.str:
'''The name of the configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-configurationname
'''
result = self._values.get("configuration_name")
assert result is not None, "Required property 'configuration_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def iam_role_arn(self) -> builtins.str:
'''The ARN of the IAM role that defines the permissions for AWS Chatbot.
        This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-iamrolearn
'''
result = self._values.get("iam_role_arn")
assert result is not None, "Required property 'iam_role_arn' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_channel_id(self) -> builtins.str:
'''The ID of the Slack channel.
To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ``.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackchannelid
'''
result = self._values.get("slack_channel_id")
assert result is not None, "Required property'slack_channel_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_workspace_id(self) -> builtins.str:
'''The ID of the Slack workspace authorized with AWS Chatbot.
To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide*.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackworkspaceid
'''
result = self._values.get("slack_workspace_id")
assert result is not None, "Required property'slack_workspace_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def guardrail_policies(self) -> typing.Optional[typing.List[builtins.str]]:
'''The list of IAM policy ARNs that are applied as channel guardrails.
The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-guardrailpolicies
'''
result = self._values.get("guardrail_policies")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def logging_level(self) -> typing.Optional[builtins.str]:
'''Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs.
Logging levels include ``ERROR``, ``INFO``, or ``NONE``.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-logginglevel
'''
result = self._values.get("logging_level")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def sns_topic_arns(self) -> typing.Optional[typing.List[builtins.str]]:
'''The ARNs of the SNS topics that deliver notifications to AWS Chatbot.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-snstopicarns
'''
result = self._values.get("sns_topic_arns")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def user_role_required(
self,
) -> typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]]:
'''Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-userrolerequired
'''
result = self._values.get("user_role_required")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CfnSlackChannelConfigurationProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.interface(jsii_type="aws-cdk-lib.aws_chatbot.ISlackChannelConfiguration")
class ISlackChannelConfiguration(
_IResource_c80c4260,
_IGrantable_71c4f5de,
_INotificationRuleTarget_faa3b79b,
typing_extensions.Protocol,
):
'''Represents a Slack channel configuration.'''
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationArn")
def slack_channel_configuration_arn(self) -> builtins.str:
        '''The ARN of the Slack channel configuration, in the form of arn:aws:chatbot:{region}:{account}:chat-configuration/slack-channel/{slackChannelName}.
:attribute: true
'''
...
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationName")
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.
:attribute: true
'''
...
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.
:default: - A role will be created.
:attribute: true
'''
...
@jsii.member(jsii_name="addToRolePolicy")
def add_to_role_policy(self, statement: _PolicyStatement_0fe33853) -> None:
'''Adds a statement to the IAM role.
:param statement: -
'''
...
@jsii.member(jsii_name="metric")
def metric(
self,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for this SlackChannelConfiguration.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
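        A minimal sketch of a call (the metric name ``"EventCount"`` is an assumed placeholder for illustration, not taken from this module)::

            from aws_cdk import Duration

            event_count = slack_channel.metric("EventCount",
                statistic="Sum",
                period=Duration.minutes(15)
            )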
'''
...
class _ISlackChannelConfigurationProxy(
jsii.proxy_for(_IResource_c80c4260), # type: ignore[misc]
jsii.proxy_for(_IGrantable_71c4f5de), # type: ignore[misc]
jsii.proxy_for(_INotificationRuleTarget_faa3b79b), # type: ignore[misc]
):
'''Represents a Slack channel configuration.'''
__jsii_type__: typing.ClassVar[str] = "aws-cdk-lib.aws_chatbot.ISlackChannelConfiguration"
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationArn")
def slack_channel_configuration_arn(self) -> builtins.str:
        '''The ARN of the Slack channel configuration, in the form of arn:aws:chatbot:{region}:{account}:chat-configuration/slack-channel/{slackChannelName}.
:attribute: true
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationName")
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.
:attribute: true
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.
:default: - A role will be created.
:attribute: true
'''
return typing.cast(typing.Optional[_IRole_235f5d8e], jsii.get(self, "role"))
@jsii.member(jsii_name="addToRolePolicy")
def add_to_role_policy(self, statement: _PolicyStatement_0fe33853) -> None:
'''Adds a statement to the IAM role.
:param statement: -
'''
if __debug__:
type_hints = typing.get_type_hints(ISlackChannelConfiguration.add_to_role_policy)
check_type(argname="argument statement", value=statement, expected_type=type_hints["statement"])
return typing.cast(None, jsii.invoke(self, "addToRolePolicy", [statement]))
@jsii.member(jsii_name="metric")
def metric(
self,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for this SlackChannelConfiguration.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
'''
if __debug__:
type_hints = typing.get_type_hints(ISlackChannelConfiguration.metric)
check_type(argname="argument metric_name", value=metric_name, expected_type=type_hints["metric_name"])
props = _MetricOptions_1788b62f(
account=account,
color=color,
dimensions_map=dimensions_map,
label=label,
period=period,
region=region,
statistic=statistic,
unit=unit,
)
return typing.cast(_Metric_e396a4dc, jsii.invoke(self, "metric", [metric_name, props]))
# Adding a "__jsii_proxy_class__(): typing.Type" function to the interface
typing.cast(typing.Any, ISlackChannelConfiguration).__jsii_proxy_class__ = lambda : _ISlackChannelConfigurationProxy
@jsii.enum(jsii_type="aws-cdk-lib.aws_chatbot.LoggingLevel")
class LoggingLevel(enum.Enum):
'''Logging levels include ERROR, INFO, or NONE.'''
ERROR = "ERROR"
'''ERROR.'''
INFO = "INFO"
'''INFO.'''
NONE = "NONE"
'''NONE.'''
@jsii.implements(ISlackChannelConfiguration)
class SlackChannelConfiguration(
_Resource_45bc6135,
metaclass=jsii.JSIIMeta,
jsii_type="aws-cdk-lib.aws_chatbot.SlackChannelConfiguration",
):
'''A new Slack channel configuration.
:exampleMetadata: infused
Example::
import aws_cdk.aws_chatbot as chatbot
# project: codebuild.Project
target = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
slack_channel_configuration_name="YOUR_CHANNEL_NAME",
slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
slack_channel_id="YOUR_SLACK_CHANNEL_ID"
)
rule = project.notify_on_build_succeeded("NotifyOnBuildSucceeded", target)
'''
def __init__(
self,
scope: constructs.Construct,
id: builtins.str,
*,
slack_channel_configuration_name: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
logging_level: typing.Optional[LoggingLevel] = None,
log_retention: typing.Optional[_RetentionDays_070f99f0] = None,
log_retention_retry_options: typing.Optional[_LogRetentionRetryOptions_62d80a14] = None,
log_retention_role: typing.Optional[_IRole_235f5d8e] = None,
notification_topics: typing.Optional[typing.Sequence[_ITopic_9eca4852]] = None,
role: typing.Optional[_IRole_235f5d8e] = None,
) -> None:
'''
:param scope: -
:param id: -
:param slack_channel_configuration_name: The name of Slack channel configuration.
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ABCBBLZZZ.
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot. To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in Setting Up AWS Chatbot with Slack in the AWS Chatbot User Guide.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Default: LoggingLevel.NONE
:param log_retention: The number of days log events are kept in CloudWatch Logs. When updating this property, unsetting it doesn't remove the log retention policy. To remove the retention policy, set the value to ``INFINITE``. Default: logs.RetentionDays.INFINITE
:param log_retention_retry_options: When log retention is specified, a custom resource attempts to create the CloudWatch log group. These options control the retry policy when interacting with CloudWatch APIs. Default: - Default AWS SDK retry options.
:param log_retention_role: The IAM role for the Lambda function associated with the custom resource that sets the retention policy. Default: - A new role is created.
:param notification_topics: The SNS topics that deliver notifications to AWS Chatbot. Default: None
:param role: The permission role of Slack channel configuration.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import uuid
import mock
import mox
import six
from heat.common import exception
from heat.common import template_format
from heat.engine import resource
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
from ..resources import cloud_loadbalancer as lb  # noqa
# The following fakes are for pyrax
cert = """\n-----BEGIN CERTIFICATE-----
MIIFBjCCAu4CCQDWdcR5LY/+/jANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMB4XDTE0MTAxNjE3MDYxNVoXDTE1MTAxNjE3MDYxNVowRTELMAkG
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
IFdpZGdpdHMgUHR5IEx0ZDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
AMm5NcP0tMKHblT6Ud1k8TxZ9/8uOHwUNPbvFsvSyCupj0J0vGCTjbuC2I5T/CXR
tnLEIt/EarlNAqcjbDCWtSyEKs3zDmmkreoIDEa8pyAQ2ycsCXGMxDN97F3/wlLZ
agUNM0FwGHLZWBg62bM6l+bpTUcX0PqSyv/aVMhJ8EPDX0Dx1RYsVwUzIe/HWC7x
vCmtDApAp1Fwq7AwlRaKU17sGwPWJ8+I8PyouBdqNuslHm7LQ0XvBA5DfkQA6feB
ZeJIyOtctM9WFWQI5fKOsyt5P306B3Zztw9VZLAmZ8qHex+R1WY1zXxDAwKEQz/X
8bRqMA/VU8OxJcK0AmY/1v/TFmAlRh2XBCIc+5UGtCcftWvZJAsKur8Hg5pPluGv
ptyqSgSsSKtOVWkyTANP1LyOkpBA8Kmkeo2CKXu1SCFypY5Q6E+Fy8Y8RaHJPvzR
NHcm1tkBvHOKyRso6FjvxuJEyIC9EyUK010nwQm7Qui11VgCSHBoaKVvkIbFfQdK
aCes0oQO5dqY0+fC/IFDhrxlvSd2Wk7KjuNjNu9kVN9Ama2pRTxhYKaN+GsHfoL7
ra6G9HjbUVULAdjCko3zOKEUzFLLf1VZYk7hDhyv9kovk0b8sr5WowxW7+9Wy0NK
WL5f2QgVCcoHw9bGhyuYQCdBfztNmKOWe9pGj6bQAx4pAgMBAAEwDQYJKoZIhvcN
AQEFBQADggIBALFSj3G2TEL/UWtNcPeY2fbxSGBrboFx3ur8+zTkdZzvfC8H9/UK
w0aRH0rK4+lKYDqF6A9bUHP17DaJm1lF9In38VVMOuur0ehUIn1S2U3OvlDLN68S
p5D4wGKMcUfUQ6pzhSKJCMvGX561TKHCc5fZhPruy75Xq2DcwJENE189foKLFvJs
ca4sIARqP6v1vfARcfH5leSsdIq8hy6VfL0BRATXfNHZh4SNbyDJYYTxrEUPHYXW
pzW6TziZXYNMG2ZRdHF/mDJuFzw2EklOrPC9MySCZv2i9swnqyuwNYh/SAMhodTv
ZDGy4nbjWNe5BflTMBceh45VpyTcnQulFhZQFwP79fK10BoDrOc1mEefhIqT+fPI
LJepLOf7CSXtYBcWbmMCLHNh+PrlCiA1QMTyd/AC1vvoiyCbs3M419XbXcBSDEh8
tACplmhf6z1vDkElWiDr8y0kujJ/Gie24iLTun6oHG+f+o6bbQ9w196T0olLcGx0
oAYL0Olqli6cWHhraVAzZ5t5PH4X9TiESuQ+PMjqGImCIUscXY4objdnB5dfPHoz
eF5whPl36/GK8HUixCibkCyqEOBBuNqhOz7nVLM0eg5L+TE5coizEBagxVCovYSj
fQ9zkIgaC5oeH6L0C1FFG1vRNSWokheBk14ztVoJCJyFr6p0/6pD7SeR
-----END CERTIFICATE-----\n"""
private_key = """\n-----BEGIN PRIVATE KEY-----
MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQDJuTXD9LTCh25U
+lHdZPE8Wff/Ljh8FDT27xbL0sgrqY9CdLxgk427gtiOU/wl0bZyxCLfxGq5TQKn
I2wwlrUshCrN8w5ppK3qCAxGvKcgENsnLAlxjMQzfexd/8JS2WoFDTNBcBhy2VgY
OtmzOpfm6U1HF9D6ksr/2lTISfBDw19A8dUWLFcFMyHvx1gu8bwprQwKQKdRcKuw
MJUWilNe7BsD1ifPiPD8qLgXajbrJR5uy0NF7wQOQ35EAOn3gWXiSMjrXLTPVhVk
COXyjrMreT99Ogd2c7cPVWSwJmfKh3sfkdVmNc18QwMChEM/1/G0ajAP1VPDsSXC
tAJmP9b/0xZgJUYdlwQiHPuVBrQnH7Vr2SQLCrq/B4OaT5bhr6bcqkoErEirTlVp
MkwDT9S8jpKQQPCppHqNgil7tUghcqWOUOhPhcvGPEWhyT780TR3JtbZAbxziskb
KOhY78biRMiAvRMlCtNdJ8EJu0LotdVYAkhwaGilb5CGxX0HSmgnrNKEDuXamNPn
wvyBQ4a8Zb0ndlpOyo7jYzbvZFTfQJmtqUU8YWCmjfhrB36C+62uhvR421FVCwHY
wpKN8zihFMxSy39VWWJO4Q4cr/ZKL5NG/LK+VqMMVu/vVstDSli+X9kIFQnKB8PW
xocrmEAnQX87TZijlnvaRo+m0AMeKQIDAQABAoICAA8DuBrDxgiMqAuvLhS6hLIn
SCw4NoAVyPNwTFQTdk65qi4aHkNZ+DyyuoetfKEcAOZ97tKU/hSYxM/H9S+QqB+O
HtmBc9stJLy8qJ1DQXVDi+xYfMN05M2oW8WLWd1szVVe7Ce8vjUeNE5pYvbSL6hC
STw3a5ibAH0WtSTLTBTfH+HnniKuXjPG4InGXqvv1j+L38+LjGilaEIO+6nX1ejE
ziX09LWfzcAglsM3ZqsN8jvw6Sr1ZWniYC2Tm9aOTRUQsdPC7LpZ//GYL/Vj5bYg
qjcZ8KBCcKe1hW8PDL6oYuOwqR+YdZkAK+MuEQtZeWYiWT10dW2la9gYKe2OZuQ1
7q3zZ6zLP+XP+0N7DRMTTuk2gurBVX7VldzIzvjmW8X+8Q5QO+EAqKr2yordK3S1
uYcKmyL4Nd6rSFjRo0zSqHMNOyKt3b1r3m/eR2W623rT5uTjgNYpiwCNxnxmcjpK
Sq7JzZKz9NLbEKQWsP9gQ3G6pp3XfLtoOHEDkSKMmQxd8mzK6Ja/9iC+JGqRTJN+
STe1vL9L2DC7GnjOH1h2TwLoLtQWSGebf/GBxju0e5pAL0UYWBNjAwcpOoRU9J5J
y9E7sNbbXTmK2rg3B/5VKGQckBWfurg7CjAmHGgz9xxceJQLKvT1O5zHZc+v4TVB
XDZjtz8L2k3wFLDynDY5AoIBAQDm2fFgx4vk+gRFXPoLNN34Jw2fT+xuwD/H7K0e
0Cas0NfyNil/Kbp+rhMHuVXTt86BIY+z8GO4wwn+YdDgihBwobAh2G9T/P6wNm+Q
NcIeRioml8V/CP7lOQONQJ6sLTRYnNLfB96uMFe+13DO/PjFybee5VflfBUrJK1M
DqRLwm9wEIf5p0CWYI/ZJaDNN71B09BB/jdT/e7Ro1hXHlq3W4tKqRDPfuUqwy3H
ocYQ1SUk3oFdSiYFd6PijNkfTnrtyToa0xUL9uGL+De1LfgV+uvqkOduQqnpm/5+
XQC1qbTUjq+4WEsuPjYf2E0WAVFGzwzWcdb0LnMIUJHwPvpLAoIBAQDfsvCZlcFM
nGBk1zUnV3+21CPK+5+X3zLHr/4otQHlGMFL6ZiQManvKMX6a/cT3rG+LvECcXGD
jSsTu7JIt9l8VTpbPaS76htTmQYaAZERitBx1C8zDMuI2O4bjFLUGUX73RyTZdRm
G68IX+7Q7SL8zr/fHjcnk+3yj0L1soAVPC7lY3se7vQ/SCre97E+noP5yOhrpnRt
dij7NYy79xcvUZfc/z0//Ia4JSCcIvv2HO7JZIPzUCVO4sjbUOGsgR9pwwQkwYeP
b5P0MVaPgFnOgo/rz6Uqe+LpeY83SUwc2q8W8bskzTLZEnwSV5bxCY+gIn9KCZSG
8QxuftgIiQDbAoIBAQDQ2oTC5kXulzOd/YxK7z2S8OImLAzf9ha+LaZCplcXKqr0
e4P3hC0xxxN4fXjk3vp5YX+9b9MIqYw1FRIA02gkPmQ3erTd65oQmm88rSY+dYRU
/iKz19OkVnycIsZrR0qAkQFGvrv8I8h+5DMvUTdQ2jrCCwQGnsgYDEqs8OI7mGFx
pcMfXu3UHvCFqMFeaPtUvuk/i1tLJgYWrA2UY+X21V+j4GlREKEMmyCj5/xl5jCA
tr2bRSY49BDVOlCFPl+BGfjzo9z6whU0qRDdXgWA/U7LHOYEn1NSAsuwTzwBHtR3
KdBYm6kI4Ufeb7buHasGwPQAX2X17MAt2ZbvIEsZAoIBAQC4g5dzh5PGhmH4K48b
YU/l1TukzUIJekAfd+ozV4I1nuKppAeEQILD0yTh9zX4vMJtdbiz5DDWapWylCpt
UsBgjsgwxDriCSr7HIhs4QfwqUhf67325MHpoc1dCbS0YBhatDpC1kaI5qLMTJzm
1gL69epLtleWHK2zWjnIAbEmUtr3uMOwczciD3vVKAeZ+BQx72bOjKESPNl2w+fO
jvQfwrR5xEqYQco5j95DC5Q6oAjSM0enZV8wn10/kYpjyKnJieMcEkmnpUgrrpqQ
iTUKYqUlw8OftEopfGwGFT5junmbek57/4nGhTmzw22sac9/LZVC034ghClV5uh4
udDrAoIBAQCJHfBPJmJMT/WtSATTceVDgZiyezWNgH2yLJMqDP6sEuImnLAg2L9M
Yc6LqMcHLj7CyXfy2AEAuYTZwXFSRmVKl6Ycad7sS/hIL1ykvDveRU9VNImexDBq
AJR4GKr6jbRZnBztnRYZTsGA+TcrFc6SwdSPXgz7JQT9uw+JkhLi59m141XBdeRc
NQ/LFgOaxjvRUID81izQaYEyADId7asy+2QVazMDafuALJ23WSUMSXajCXaC6/7N
53RWrOAb+kFRgjuHM8pQkpgnY/Ds0MZxpakFw3Y7PAEL99xyYdR+rE3JOMjPlgr0
LpTt0Xs1OFZxaNpolW5Qis4os7UmmIRV
-----END PRIVATE KEY-----\n"""
class FakeException(Exception):
pass
class FakeClient(object):
user_agent = "Fake"
USER_AGENT = "Fake"
class FakeManager(object):
api = FakeClient()
def list(self):
pass
def get(self, item):
pass
def delete(self, item):
pass
def create(self, *args, **kwargs):
pass
def find(self, *args, **kwargs):
pass
def action(self, item, action_type, body=None):
pass
class FakeLoadBalancerManager(object):
def __init__(self, api=None, *args, **kwargs):
pass
def set_content_caching(self, *args, **kwargs):
pass
class FakeNode(object):
def __init__(self, address=None, port=None, condition=None, weight=None,
status=None, parent=None, type=None, id=None):
if not (address and port):
# This mimics the check that pyrax does on Node instantiation
raise TypeError("You must include an address and "
"a port when creating a node.")
self.address = address
self.port = port
self.condition = condition
self.weight = weight
self.status = status
self.parent = parent
self.type = type
self.id = id
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def update(self):
pass
def delete(self):
pass
class FakeVirtualIP(object):
def __init__(self, address=None, port=None, condition=None,
ipVersion=None, type=None, id=None):
self.address = address
self.port = port
self.condition = condition
self.ipVersion = ipVersion
self.type = type
self.id = id
self.ip_version = ipVersion
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
class FakeLoadBalancerClient(object):
def __init__(self, *args, **kwargs):
self.Node = FakeNode
self.VirtualIP = FakeVirtualIP
def get(self, *args, **kwargs):
pass
def create(self, *args, **kwargs):
pass
class FakeLoadBalancer(object):
def __init__(self, name=None, info=None, *args, **kwargs):
name = name or uuid.uuid4()
info = info or {"fake": "fake"}
self.id = uuid.uuid4()
self.manager = FakeLoadBalancerManager()
self.Node = FakeNode
self.VirtualIP = FakeVirtualIP
self.nodes = []
self.algorithm = "ROUND_ROBIN"
self.session_persistence = "HTTP_COOKIE"
self.connection_logging = False
self.timeout = None
self.httpsRedirect = False
self.protocol = None
self.port = None
self.name = None
self.halfClosed = None
self.content_caching = False
def get(self, *args, **kwargs):
pass
def add_nodes(self, *args, **kwargs):
pass
def add_ssl_termination(self, *args, **kwargs):
pass
def set_error_page(self, *args, **kwargs):
pass
def clear_error_page(self, *args, **kwargs):
pass
def add_access_list(self, *args, **kwargs):
pass
def update(self, *args, **kwargs):
pass
def add_health_monitor(self, *args, **kwargs):
pass
def delete_health_monitor(self, *args, **kwargs):
pass
def delete_ssl_termination(self, *args, **kwargs):
pass
def set_metadata(self, *args, **kwargs):
pass
def delete_metadata(self, *args, **kwargs):
pass
def add_connection_throttle(self, *args, **kwargs):
pass
def delete_connection_throttle(self, *args, **kwargs):
pass
def delete(self, *args, **kwargs):
pass
def get_health_monitor(self, *args, **kwargs):
return {}
def get_metadata(self, *args, **kwargs):
return {}
def get_error_page(self, *args, **kwargs):
pass
def get_connection_throttle(self, *args, **kwargs):
pass
def get_ssl_termination(self, *args, **kwargs):
pass
def get_access_list(self, *args, **kwargs):
pass
class LoadBalancerWithFakeClient(lb.CloudLoadBalancer):
def cloud_lb(self):
return FakeLoadBalancerClient()
def override_resource():
return {
'Rackspace::Cloud::LoadBalancer': LoadBalancerWithFakeClient
}
class LoadBalancerTest(common.HeatTestCase):
def setUp(self):
super(LoadBalancerTest, self).setUp()
self.lb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "fawef",
"Resources": {
self._get_lb_resource_name(): {
"Type": "Rackspace::Cloud::LoadBalancer",
"Properties": {
"name": "test-clb",
"nodes": [{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED"}],
"protocol": "HTTP",
"port": 80,
"virtualIps": [
{"type": "PUBLIC", "ipVersion": "IPV6"}],
"algorithm": 'LEAST_CONNECTIONS',
"connectionThrottle": {'maxConnectionRate': 1000},
'timeout': 110,
'contentCaching': 'DISABLED'
}
}
}
}
self.lb_name = 'test-clb'
self.expected_body = {
"nodes": [FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED", type=u"PRIMARY",
weight=1)],
"protocol": u'HTTP',
"port": 80,
"virtual_ips": [FakeVirtualIP(type=u"PUBLIC", ipVersion=u"IPV6")],
"algorithm": u'LEAST_CONNECTIONS',
"connectionThrottle": {'maxConnectionRate': 1000,
'maxConnections': None,
'rateInterval': None,
'minConnections': None},
"connectionLogging": None,
"halfClosed": None,
"healthMonitor": None,
"metadata": None,
"sessionPersistence": None,
"timeout": 110,
"httpsRedirect": False
}
lb.resource_mapping = override_resource
resource._register_class("Rackspace::Cloud::LoadBalancer",
LoadBalancerWithFakeClient)
def _get_lb_resource_name(self):
return "lb-" + str(uuid.uuid4())
def __getattribute__(self, name):
if name == 'expected_body' or name == 'lb_template':
return copy.deepcopy(super(LoadBalancerTest, self)
.__getattribute__(name))
return super(LoadBalancerTest, self).__getattribute__(name)
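    # Stub out pyrax's create()/get() so the resource under test immediately
    # sees an ACTIVE fake load balancer with a known resource id.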
def _mock_create(self, tmpl, stack, resource_name, lb_name, lb_body):
resource_defns = tmpl.resource_definitions(stack)
rsrc = LoadBalancerWithFakeClient(resource_name,
resource_defns[resource_name],
stack)
fake_lb = FakeLoadBalancer(name=lb_name)
fake_lb.status = 'ACTIVE'
fake_lb.resource_id = 1234
self.m.StubOutWithMock(rsrc.clb, 'create')
rsrc.clb.create(lb_name, **lb_body).AndReturn(fake_lb)
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
return (rsrc, fake_lb)
def _get_first_resource_name(self, templ):
return next(k for k in templ['Resources'])
def _mock_loadbalancer(self, lb_template, expected_name, expected_body):
t = template_format.parse(json.dumps(lb_template))
self.stack = utils.parse_stack(t, stack_name=utils.random_name())
        rsrc, fake_lb = self._mock_create(
            self.stack.t, self.stack,
            self._get_first_resource_name(lb_template),
            expected_name, expected_body)
return (rsrc, fake_lb)
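    # Helpers to override properties on the first template resource and on
    # the expected API body without rebuilding the fixtures by hand.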
def _set_template(self, templ, **kwargs):
for k, v in six.iteritems(kwargs):
templ['Resources'][self._get_first_resource_name(templ)][
'Properties'][k] = v
return templ
def _set_expected(self, expected, **kwargs):
for k, v in six.iteritems(kwargs):
expected[k] = v
return expected
def test_process_node(self):
nodes = [{'addresses': ['1234'], 'port': 80, 'enabled': True},
{'addresses': ['4567', '8901', '8903'], 'port': 80,
'enabled': True},
{'addresses': [], 'port': 80, 'enabled': True}]
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
expected_nodes = [{'address': '1234', 'port': 80, 'enabled': True},
{'address': '4567', 'port': 80, 'enabled': True},
{'address': '8901', 'port': 80, 'enabled': True},
{'address': '8903', 'port': 80, 'enabled': True}]
self.assertEqual(expected_nodes, list(rsrc._process_nodes(nodes)))
def test_nodeless(self):
"""It's possible to create a LoadBalancer resource with no nodes."""
template = self._set_template(self.lb_template,
nodes=[])
expected_body = copy.deepcopy(self.expected_body)
expected_body['nodes'] = []
rsrc, fake_lb = self._mock_loadbalancer(
template, self.lb_name, expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_alter_properties(self):
# test alter properties functions
template = self._set_template(self.lb_template,
sessionPersistence='HTTP_COOKIE',
connectionLogging=True,
metadata={'yolo': 'heeyyy_gurl'})
expected = self._set_expected(self.expected_body,
sessionPersistence={
'persistenceType': 'HTTP_COOKIE'},
connectionLogging={'enabled': True},
metadata=[
{'key': 'yolo',
'value': 'heeyyy_gurl'}])
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_validate_vip(self):
snippet = {
"nodes": [],
"protocol": 'HTTP',
"port": 80,
"halfClosed": None,
"algorithm": u'LEAST_CONNECTIONS',
"virtualIps": [{"id": "1234"}]
}
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
# happy path
resdef = rsrc_defn.ResourceDefinition("testvip",
lb.CloudLoadBalancer,
properties=snippet)
rsrc = lb.CloudLoadBalancer("testvip", resdef, stack)
self.assertIsNone(rsrc.validate())
# make sure the vip id prop is exclusive
snippet["virtualIps"][0]["type"] = "PUBLIC"
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn("Cannot specify type or version", str(exc))
# make sure you have to specify type and version if no id
snippet["virtualIps"] = [{}]
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn("Must specify VIP type and version", str(exc))
def test_validate_half_closed(self):
# test failure (invalid protocol)
template = self._set_template(self.lb_template, halfClosed=True)
expected = self._set_expected(self.expected_body, halfClosed=True)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
        self.assertIn('The halfClosed property is only available for the TCP '
                      'or TCP_CLIENT_FIRST protocols', str(exc))
# test TCP protocol
template = self._set_template(template, protocol='TCP')
expected = self._set_expected(expected, protocol='TCP')
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
# test TCP_CLIENT_FIRST protocol
template = self._set_template(template,
protocol='TCP_CLIENT_FIRST')
expected = self._set_expected(expected,
protocol='TCP_CLIENT_FIRST')
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
def test_validate_health_monitor(self):
# test connect success
health_monitor = {
'type': 'CONNECT',
'attemptsBeforeDeactivation': 1,
'delay': 1,
'timeout': 1
}
template = self._set_template(self.lb_template,
healthMonitor=health_monitor)
expected = self._set_expected(self.expected_body,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
# test connect failure
# bodyRegex is only valid for type 'HTTP(S)'
health_monitor['bodyRegex'] = 'dfawefawe'
template = self._set_template(template,
healthMonitor=health_monitor)
expected = self._set_expected(expected,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn('Unknown Property bodyRegex', str(exc))
# test http fields
health_monitor['type'] = 'HTTP'
health_monitor['bodyRegex'] = 'bodyRegex'
        health_monitor['statusRegex'] = 'statusRegex'
health_monitor['hostHeader'] = 'hostHeader'
health_monitor['path'] = 'path'
template = self._set_template(template,
healthMonitor=health_monitor)
expected = self._set_expected(expected,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
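    # Enabling SSL termination without a certificate must fail validation;
    # adding the certificate makes the otherwise identical template valid.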
def test_validate_ssl_termination(self):
ssl_termination = {
'privatekey': 'ewfawe',
'intermediateCertificate': 'fwaefawe',
'secureTrafficOnly': True
}
# test ssl termination enabled without required fields failure
template = self._set_template(self.lb_template,
sslTermination=ssl_termination)
expected = self._set_expected(self.expected_body,
sslTermination=ssl_termination)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed, rsrc.validate)
self.assertIn("Property certificate not assigned", six.text_type(exc))
ssl_termination['certificate'] = 'dfaewfwef'
template = self._set_template(template,
sslTermination=ssl_termination)
expected = self._set_expected(expected,
sslTermination=ssl_termination)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
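    # The API reports certificates with surrounding whitespace stripped, so
    # creation must still converge when the template values are unstripped.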
def test_ssl_termination_unstripped_certificates(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
            'certificate': ' \nfawefwea\n ',
'intermediateCertificate': "\n\nintermediate_certificate\n",
'secureTrafficOnly': False
}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_ssl_termination_intermediateCertificate_None(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
            'certificate': ' \nfawefwea\n ',
'intermediateCertificate': None,
'secureTrafficOnly': False
}
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
add_ssl_termination_args = {
'securePort': 443,
'privatekey': 'afwefawe',
            'certificate': ' \nfawefwea\n ',
'intermediateCertificate': '',
'secureTrafficOnly': False
}
fake_lb.add_ssl_termination(**add_ssl_termination_args)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
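    # The following tests cover configuration that is applied via separate
    # API calls after the load balancer itself has been created.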
def test_post_creation_access_list(self):
access_list = [{"address": '192.168.1.1/0',
'type': 'ALLOW'},
{'address': '172.165.3.43',
'type': 'DENY'}]
api_access_list = [{"address": '192.168.1.1/0', 'id': 1234,
'type': 'ALLOW'},
{'address': '172.165.3.43', 'id': 3422,
'type': 'DENY'}]
template = self._set_template(self.lb_template,
accessList=access_list)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_access_list')
fake_lb.get_access_list().AndReturn([])
fake_lb.get_access_list().AndReturn(api_access_list)
self.m.StubOutWithMock(fake_lb, 'add_access_list')
fake_lb.add_access_list(access_list)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_ref_id(self):
"""The Reference ID of the resource is the resource ID."""
template = self._set_template(self.lb_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
self.assertEqual(rsrc.resource_id, rsrc.FnGetRefId())
def test_post_creation_error_page(self):
error_page = "REALLY BIG ERROR"
template = self._set_template(self.lb_template,
errorPage=error_page)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn({u'errorpage': {u'content': u''}})
fake_lb.get_error_page().AndReturn(
{u'errorpage': {u'content': error_page}})
        self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_post_creation_ssl_termination(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False
}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_post_creation_content_caching(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)[0]
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
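    # check() maps the cloud load balancer status onto the resource state:
    # ACTIVE completes the check; any other status, or a missing LB, fails it.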
def test_check(self):
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
loadbalancer = lb.CloudLoadBalancer("test", resdef, stack)
loadbalancer._add_event = mock.Mock()
mock_cloud_lb = mock.Mock()
mock_get = mock.Mock(return_value=mock_cloud_lb)
loadbalancer.clb.get = mock_get
mock_cloud_lb.status = 'ACTIVE'
scheduler.TaskRunner(loadbalancer.check)()
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('COMPLETE', loadbalancer.status)
mock_cloud_lb.status = 'FOOBAR'
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(loadbalancer.check))
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('FAILED', loadbalancer.status)
self.assertIn('FOOBAR', str(exc))
mock_get.side_effect = lb.NotFound('boom')
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(loadbalancer.check))
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('FAILED', loadbalancer.status)
self.assertIn('boom', str(exc))
def test_update_add_node_by_address(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '172.168.1.4'
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1},
{"addresses": [expected_ip],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.nodes = [
FakeNode(address=u"172.168.1.4", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"166.78.103.141", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
]
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'add_nodes')
fake_lb.add_nodes([
fake_lb.Node(address=expected_ip,
port=80,
condition='ENABLED',
type="PRIMARY", weight=1)])
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_resolve_attr_noid(self):
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
lbres = lb.CloudLoadBalancer("test", resdef, stack)
self.assertIsNone(lbres._resolve_attribute("PublicIp"))
def test_resolve_attr_virtualips(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.virtual_ips = [FakeVirtualIP(address='1.2.3.4',
type='PUBLIC',
ipVersion="IPv6",
id='test-id')]
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
expected = [{
'ip_version': 'IPv6',
'type': 'PUBLIC',
'id': 'test-id',
'address': '1.2.3.4'}]
self.m.ReplayAll()
self.assertEqual(expected, rsrc._resolve_attribute("virtualIps"))
self.m.VerifyAll()
def test_update_nodes_immutable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
current_nodes = [
FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"3.3.3.3", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1)
]
fake_lb.nodes = current_nodes
fake_lb.tracker = "fake_lb"
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '4.4.4.4'
update_template['Properties']['nodes'] = [
{"addresses": ["1.1.1.1"], "port": 80, "condition": "ENABLED",
"type": "PRIMARY", "weight": 1},
{"addresses": ["2.2.2.2"], "port": 80, "condition": "DISABLED",
"type": "PRIMARY", "weight": 1},
{"addresses": [expected_ip], "port": 80, "condition": "ENABLED",
"type": "PRIMARY", "weight": 1}
]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.status = "
|
12845f9dceb836a8c5f395a89d266b458a782958
|
Python
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add tests for the badge server image end-point."""
import datetime
import unittest
import unittest.mock
import urllib.parse
from compatibility_lib import compatibility_store
from compatibility_lib import dependency_highlighter
from compatibility_lib import deprecated_dep_finder_stub
from compatibility_lib import fake_compatibility_store
from compatibility_lib import package
import main
import utils
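# Canned CompatibilityResult fixtures. Names encode the packages involved,
# whether a git checkout is used, the outcome, and the Python major version,
# e.g. GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.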
APACHE_BEAM_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'apache-beam[gcp]': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '2.12.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '2.12.0',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'apache-beam[gcp]': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '2.12.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '2.12.0',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_INSTALL_FAILURE_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_INSTALL_FAILURE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'tensorflow': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.1',
'installed_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
'is_latest': True,
'latest_version': '1.3.1',
'latest_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
TENSORFLOW_GIT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/tensorflow.git')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'tensorflow': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.1',
'installed_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
'is_latest': True,
'latest_version': '1.3.1',
'latest_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_INSTALL_ERROR_3 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=3, # apache-beam does not support Python 3
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_INSTALL_ERROR_3 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=3, # apache-beam does not support Python 3
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-python-client.git')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_INSTALL_ERROR_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=2, # tensorflow does not support Python 2
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_INSTALL_ERROR_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=2, # tensorflow does not support Python 2
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-python-client'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
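# A store snapshot in which every self- and pair-compatibility check passes.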
RECENT_SUCCESS_DATA = [
APACHE_BEAM_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3,
TENSORFLOW_RECENT_SUCCESS_3,
TENSORFLOW_GIT_RECENT_SUCCESS_3,
APACHE_BEAM_GOOGLE_API_CORE_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_SUCCESS_3,
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3,
]
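# The same snapshot, except google-api-core fails its self-compatibility
# checks and its pair results are flagged accordingly.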
GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA = [
APACHE_BEAM_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_2,
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_2,
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_3,
TENSORFLOW_RECENT_SUCCESS_3,
TENSORFLOW_GIT_RECENT_SUCCESS_3,
APACHE_BEAM_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3,
]
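# Dependency info in which every installed version is also the latest release.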
UP_TO_DATE_DEPS = {
'google-auth': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.6.3',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
},
'grpcio': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.19.0',
'installed_version_time': datetime.datetime(
2019, 2, 27, 0, 0, 53),
'is_latest': True,
'latest_version': '1.19.0',
'latest_version_time': datetime.datetime(
2019, 2, 27, 0, 0, 53)
},
'requests': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48)
},
}
class BadgeTestCase(unittest.TestCase):
"""Base class for tests of badge images."""
def setUp(self):
self.fake_store = fake_compatibility_store.CompatibilityStore()
self.dependency_highlighter_stub = dependency_highlighter.DependencyHighlighter(
store=self.fake_store)
self.deprecated_dep_finder_stub = deprecated_dep_finder_stub.DeprecatedDepFinderStub(
)
main.app.config['TESTING'] = True
self.client = main.app.test_client()
self._store_patch = unittest.mock.patch('utils.store', self.fake_store)
self._highlighter_patch = unittest.mock.patch(
'utils.highlighter', self.dependency_highlighter_stub)
self._finder_patch = unittest.mock.patch(
'utils.finder', self.deprecated_dep_finder_stub)
self._pkg_list_patch = unittest.mock.patch(
'compatibility_lib.configs.PKG_LIST', [
'apache-beam[gcp]',
'google-api-core',
'google-api-python-client',
'tensorflow',
])
self._whitelist_urls_patch = unittest.mock.patch(
'compatibility_lib.configs.WHITELIST_URLS', {
'git+git://github.com/google/apache-beam.git':
'apache-beam[gcp]',
'git+git://github.com/google/api-core.git': 'google-api-core',
'git+git://github.com/google/api-python-client.git':
'google-api-python-client',
'git+git://github.com/google/tensorflow.git': 'tensorflow',
})
self._store_patch.start()
self.addCleanup(self._store_patch.stop)
self._highlighter_patch.start()
self.addCleanup(self._highlighter_patch.stop)
self._finder_patch.start()
self.addCleanup(self._finder_patch.stop)
self._pkg_list_patch.start()
self.addCleanup(self._pkg_list_patch.stop)
self._whitelist_urls_patch.start()
self.addCleanup(self._whitelist_urls_patch.stop)
def get_image_json(self, package):
"""Return the calculated badge data for a package as a dict."""
return self.client.get(
'/one_badge_image', query_string={
'package': package
}).get_json()
def get_target_json(self, package):
"""Return the calculated details page data for a package as a dict."""
return self.client.get(
'/one_badge_target', query_string={
'package': package
}).get_json()
def assertLinkUrl(self, package, actual_url):
"""Assert that the link for the badge image is correct for a package."""
parsed_url = urllib.parse.urlparse(actual_url)
params = urllib.parse.parse_qs(parsed_url.query)
self.assertEqual([package], params['package'])
def _assertImageResponse(
self, package_name, expected_status, expected_left_text):
"""Assert that the badge image response is correct for a package."""
json_response = self.get_image_json(package_name)
self.assertEqual(json_response['left_text'], expected_left_text)
self.assertEqual(json_response['right_text'], expected_status.value)
self.assertEqual(json_response['right_color'],
main.BADGE_STATUS_TO_COLOR.get(expected_status))
self.assertLinkUrl(package_name, json_response['whole_link'])
def _assertImageResponsePyPI(self, package_name, expected_status):
"""Assert that the badge image response is correct for a PyPI package."""
self._assertImageResponse(
package_name, expected_status, 'compatibility check (PyPI)')
def _assertImageResponseGithub(self, package_name, expected_status):
"""Assert that the badge image response is correct for a github package."""
self._assertImageResponse(
package_name, expected_status, 'compatibility check (master)')
def assertBadgeStatusToColor(self, badge_status_to_color):
"""Assert that the given badge status to color mapping is correct."""
for status, color in badge_status_to_color.items():
badge_status = main.BadgeStatus(status)
self.assertEqual(main.BADGE_STATUS_TO_COLOR[badge_status], color)
class TestSuccess(BadgeTestCase):
"""Tests for the cases where the badge image displays'success.'"""
def setUp(self):
BadgeTestCase.setUp(self)
self.success_data = RECENT_SUCCESS_DATA
# All of the CompatibilityResults in pairs_without_common_versions and
# github_pairs have erroneous statuses but should still yield a
        # 'success' status as they should be skipped.
self.pairs_without_common_versions = [
APACHE_BEAM_GOOGLE_API_CORE_RECENT_INSTALL_ERROR_3,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_INSTALL_ERROR_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_INSTALL_ERROR_2,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_INSTALL_ERROR_2,
]
self.github_pairs = [
compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')
],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0)),
compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/tensorflow.git'),
package.Package('google-api-core')
],
python_major_version=3,
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0)),
]
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.SUCCESS)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.SUCCESS)
def assertTargetResponse(self, package_name, *supported_pyversions):
expected_status = main.BadgeStatus.SUCCESS
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
for pyversion in ['py2', 'py3']:
expected_details = utils.EMPTY_DETAILS
if pyversion not in supported_pyversions:
expected_details = ('The package does not support this '
'version of python.')
self.assertEqual(
json_response['self_compat_res'][pyversion],
                {'details': expected_details, 'status': expected_status})
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
            {'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_pypi_py2py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_pypi_py2_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'apache-beam[gcp]'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2')
def test_git_py2_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/apache-beam.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2')
def test_pypi_py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'tensorflow'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py3')
def test_git_py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/tensorflow.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py3')
def test_pypi_py2py3_fresh_nodeps_ignore_pairs_without_common_versions(
self):
"""Tests that pairs not sharing a common version are ignored."""
fake_results = self.success_data + self.pairs_without_common_versions
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps_ignore_pairs_without_common_versions(
self):
"""Tests that pairs not sharing a common version are ignored."""
fake_results = self.success_data + self.pairs_without_common_versions
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_pypi_py2py3_fresh_nodeps_ignore_git(self):
"""Tests that pair results containing git packages are ignored."""
fake_results = self.success_data + self.github_pairs
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps_ignore_git(self):
"""Tests that pair results containing git packages are ignored."""
fake_results = self.success_data + self.github_pairs
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
class TestUnknownPackage(BadgeTestCase):
"""Tests for the cases where the badge image displays 'unknown package.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.UNKNOWN_PACKAGE)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.UNKNOWN_PACKAGE)
def assertTargetResponse(self, package_name):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.UNKNOWN_PACKAGE
expected_details = ('This package is not a whitelisted google '
'python package; to whitelist a package, '
'contact the python team.')
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
expected_result = {'status': expected_status, 'details': {}}
self.assertEqual(json_response['dependency_res'], expected_result)
def test_pypi_unknown_package(self):
self.fake_store.save_compatibility_statuses(RECENT_SUCCESS_DATA)
package_name = 'xxx'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name)
def test_github_unknown_package(self):
self.fake_store.save_compatibility_statuses(RECENT_SUCCESS_DATA)
package_name = 'https://github.com/brianquinlan/notebooks'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name)
class TestMissingData(BadgeTestCase):
"""Tests for the cases where the badge image displays'missing data.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.MISSING_DATA)
def test_missing_self_compatibility_data(self):
package_name = 'google-api-core'
missing_self_data = list(RECENT_SUCCESS_DATA)
missing_self_data.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
self.fake_store.save_compatibility_statuses(missing_self_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.MISSING_DATA
expected_details = ("Missing data for packages=['google-api-core'], "
"versions=[2]")
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
            {'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_missing_pair_compatibility_data(self):
package_name = 'google-api-core'
missing_self_data = list(RECENT_SUCCESS_DATA)
missing_self_data.remove(
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2)
self.fake_store.save_compatibility_statuses(missing_self_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
expected_status = main.BadgeStatus.MISSING_DATA
expected_details = {
'google-api-python-client': (
"Missing data for packages=['google-api-core', "
"'google-api-python-client'], versions=[2]")
}
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
self.assertEqual(
json_response['dependency_res'],
            {'deprecated_deps': '', 'details': {}, 'status': expected_status})
class TestSelfIncompatible(BadgeTestCase):
"""Tests for the cases where the badge image displays'self incompatible.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.SELF_INCOMPATIBLE)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.SELF_INCOMPATIBLE)
def assertTargetResponse(self, package_name, expected_pair_result):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
        # self compatibility result check
|
b2ec682fba5d5eec999d160d44c829f93d8caa36
|
Python
|
# -*- coding: utf-8 -*-
# @Time : 2018/4/9 14:53:17
# @Author : SilverMaple
# @Site : https://github.com/SilverMaple
# @File : routes.py
import hashlib
import os
import logging
import sys
import shutil
import json
import subprocess
import time
from datetime import datetime
from app.decorators import async
from flask import render_template, flash, redirect, url_for, request, g, \
jsonify, current_app, session, make_response
from flask_login import current_user, login_required
from flask_babel import _, get_locale
from flask_uploads import UploadSet
from guess_language import guess_language
from app import db
from app.main.forms import EditProfileForm, PostForm, SearchForm, AddAppForm, AddAppExtensionForm, EditAppExtensionForm, \
AddAppAdminForm, AddTenantForm, AddTenantDatabaseForm, EditTenantDatabaseForm, AddAppCodeForm, AddRoleForm, AddUserForm
from app.models import User, Post, App, AppAdmin, AppExpand, AdminToApp, Tenant, TenantDb, AppCode, SaasRole, SaasUser
from app.translate import translate
from app.main import bp
from app.email import follower_notification
from app.auth import LoginType, current_login_type
from app import auth
from pip._internal import commands
from requests import Response
from werkzeug.datastructures import FileStorage
from werkzeug.test import EnvironBuilder
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash
logger = logging.getLogger("MirrorConstruct")
# logger = logging.getLogger("MirrorConstruct")
formatter = logging.Formatter('[%(asctime)s] %(message)s')
blank_formatter = logging.Formatter('')
# formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ## %(message)s')
file_handler = logging.FileHandler("logs/mirror_construct.log")
file_handler.setFormatter(formatter)  # the output format can be set via setFormatter
# attach the file handler to the logger
logger.addHandler(file_handler)
logger.setLevel(logging.DEBUG)
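# Runs before every request: refresh the authenticated user's last-seen
# timestamp and expose the search form and locale to templates via g.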
@bp.before_app_request
def before_request():
if current_user.is_authenticated:
current_user.last_seen = datetime.utcnow()
db.session.commit()
g.search_form = SearchForm()
g.locale = str(get_locale())
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
@login_required
def index():
form = PostForm()
if form.validate_on_submit():
language = guess_language(form.post.data)
if language == 'UNKNOWN' or len(language) > 5:
language = ''
post = Post(body=form.post.data, author=current_user,
language=language)
db.session.add(post)
db.session.commit()
flash(_('Your post is now live!'))
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
posts = current_user.followed_posts().paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.explore', page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('main.explore', page=posts.prev_num) \
if posts.has_prev else None
return render_template('index.html', title=_('Home'), form=form,
posts=posts.items, next_url=next_url,
prev_url=prev_url)
@bp.route('/index_registe')
def index_registe():
if current_user.is_authenticated and auth.current_login_type == LoginType.REGISTE_MANAGE:
return render_template('index_registe_manage.html', title=_('Registe Manage'))
else:
auth.current_login_type = LoginType.REGISTE_MANAGE
return redirect(url_for('auth.login'))
@bp.route('/index_app')
def index_app():
if current_user.is_authenticated and auth.current_login_type == LoginType.WEB_APP_MANAGE:
app_list = [a.app_id for a in AdminToApp.query.filter(
AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
data = [App.query.filter(App.id == a).order_by(db.asc(App.name)).first() for a in app_list]
data.sort(key=lambda a: a.name)
app_name_list = [a.name for a in data]
current_selected_app_name = None
if session.get('current_selected_app_name'):
current_selected_app_name = session['current_selected_app_name']
return render_template('index_app_manage.html', title=_('Web App Manage'), app_name_list=app_name_list,
current_selected_app_name=current_selected_app_name)
else:
auth.current_login_type = LoginType.WEB_APP_MANAGE
return redirect(url_for('auth.login'))
@bp.route('/index_tenant')
def index_tenant():
if current_user.is_authenticated and auth.current_login_type == LoginType.TENANT_SERVICE:
return render_template('index_tenant_service.html', title=_('Tenant Service'))
else:
auth.current_login_type = LoginType.TENANT_SERVICE
return redirect(url_for('auth.login'))
@bp.route('/explore')
@login_required
def explore():
page = request.args.get('page', 1, type=int)
posts = Post.query.order_by(Post.timestamp.desc()).paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.explore', page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('main.explore', page=posts.prev_num) \
if posts.has_prev else None
return render_template('index.html', title=_('Explore'),
posts=posts.items, next_url=next_url,
prev_url=prev_url)
@bp.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
posts = user.posts.order_by(Post.timestamp.desc()).paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.user', username=user.username,
page=posts.next_num) if posts.has_next else None
prev_url = url_for('main.user', username=user.username,
page=posts.prev_num) if posts.has_prev else None
return render_template('user.html', user=user, posts=posts.items,
next_url=next_url, prev_url=prev_url)
@bp.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm(current_user.username)
if form.validate_on_submit():
current_user.username = form.username.data
current_user.about_me = form.about_me.data
db.session.commit()
flash(_('Your changes have been saved.'))
return redirect(url_for('main.edit_profile'))
elif request.method == 'GET':
form.username.data = current_user.username
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', title=_('Edit Profile'),
form=form)
@bp.route('/follow/<username>')
@login_required
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash(_('User %(username)s not found.', username=username))
return redirect(url_for('main.index'))
if user == current_user:
flash(_('You cannot follow yourself!'))
return redirect(url_for('main.user', username=username))
current_user.follow(user)
db.session.commit()
flash(_('You are following %(username)s!', username=username))
follower_notification(user, current_user)
return redirect(url_for('main.user', username=username))
@bp.route('/unfollow/<username>')
@login_required
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash(_('User %(username)s not found.', username=username))
return redirect(url_for('main.index'))
if user == current_user:
flash(_('You cannot unfollow yourself!'))
return redirect(url_for('main.user', username=username))
current_user.unfollow(user)
db.session.commit()
flash(_('You are not following %(username)s.', username=username))
return redirect(url_for('main.user', username=username))
@bp.route('/translate', methods=['POST'])
@login_required
def translate_text():
return jsonify({'text': translate(request.form['text'],
request.form['source_language'],
request.form['dest_language'])})
@bp.route('/search')
@login_required
def search():
if not g.search_form.validate():
return redirect(url_for('main.explore'))
page = request.args.get('page', 1, type=int)
posts, total = Post.search(g.search_form.q.data, page,
current_app.config['POSTS_PER_PAGE'])
next_url = url_for('main.search', q=g.search_form.q.data, page=page + 1) \
if total > page * current_app.config['POSTS_PER_PAGE'] else None
prev_url = url_for('main.search', q=g.search_form.q.data, page=page - 1) \
if page > 1 else None
return render_template('search.html', title=_('Search'), posts=posts,
next_url=next_url, prev_url=prev_url)
# ---------------------------------------------------------------------------------------
# registe manage app setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_setting')
@login_required
def registe_manage_app_setting():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('App List'), AppAdmin=AppAdmin,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_add():
form = AddAppForm(None)
if form.validate_on_submit():
app_id = hashlib.md5(form.app_name.data.encode(encoding='UTF-8')).hexdigest()
db.session.add(App(id=None, name=form.app_name.data, appid=app_id))
db.session.commit()
        flash(_('New app has been added.'))
return redirect(url_for('main.registe_manage_app_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('Add New App'), AppAdmin=AppAdmin, form=form,
addTitle=('Add New App'))
@bp.route('/registe_manage_app_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = App.query.filter(App.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
            return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('App List'), AppAdmin=AppAdmin,
isCheck=isCheck, isEdit=isEdit, session=session,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.registe_manage_app_setting'))
@bp.route('/registe_manage_app_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_edit(id):
if session.get('validate_app_name'):
form = AddAppForm(session['validate_app_name'])
else:
form = AddAppForm(None)
if form.validate_on_submit():
current_data = App.query.filter(App.id == id).first()
current_data.name = form.app_name.data
db.session.commit()
        flash(_('App has been edited.'))
return redirect(url_for('main.registe_manage_app_setting'))
elif request.method == 'GET':
current_data = App.query.filter(App.id == id).first()
form.app_name.data = current_data.name
form.app_ID.data = current_data.appid
if AppAdmin.query.filter(AppAdmin.id == current_data.creater_id).first():
form.creator_name.data = AppAdmin.query.filter(AppAdmin.id == current_data.creater_id).first().name
session['validate_app_name'] = form.app_name.data
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('Edit App'), AppAdmin=AppAdmin, form=form,
editTitle=('Edit App'))
# ---------------------------------------------------------------------------------------
# registe manage app extension
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_extension')
@login_required
def registe_manage_app_extension():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'), _('Library File'), _('DB Initial Path')]
data = AppExpand.query.order_by(db.asc(AppExpand.type)).all()
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('App Extension List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_extension_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_add():
form = AddAppExtensionForm(None)
if form.validate_on_submit():
upload = UploadSet()
if hasattr(form.library_file.data, 'filename'):
filename1 = secure_filename(form.library_file.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library'), filename1).replace('\\', '/')
form.library_file.data.save(filePath1)
else:
filePath1=''
if hasattr(form.library_file_depend.data, 'filename'):
filename2 = secure_filename(form.library_file_depend.data.filename)
filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library_depend'), filename2).replace('\\', '/')
form.library_file_depend.data.save(filePath2)
else:
filePath2 = ''
db.session.add(AppExpand(id=None, type=form.app_type.data, pattern_begin=form.tag_begin.data,
pattern_end=form.tag_end.data, library_path=filePath1,
library_depend_path=filePath2,
library_desc=form.library_file_description.data,
db_init_path=form.db_info_file_path.data))
db.session.commit()
flash(_('New app extension has been added.'))
return redirect(url_for('main.registe_manage_app_extension'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('Add New App Extension'), AppAdmin=AppAdmin, form=form,
                           addTitle=_('Add New App Extension'))
@bp.route('/registe_manage_app_extension_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = AppExpand.query.filter(AppExpand.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
flash(_('Record has been deleted.'))
return jsonify({'result':'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'), _('Library File'), _('DB Initial Path')]
data = AppExpand.query.all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('App Extension List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_extension_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete_select():
flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.registe_manage_app_extension'))
@bp.route('/registe_manage_app_extension_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_edit(id):
if session.get('validate_app_type'):
form = EditAppExtensionForm(session['validate_app_type'])
else:
form = EditAppExtensionForm(None)
if form.validate_on_submit():
current_data = AppExpand.query.filter(AppExpand.id == id).first()
current_data.type = form.app_type.data
current_data.pattern_begin = form.tag_begin.data
current_data.pattern_end = form.tag_end.data
current_data.library_desc = form.library_file_description.data
current_data.db_init_path = form.db_info_file_path.data
form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
if hasattr(form.library_file.data, 'filename'):
filename1 = secure_filename(form.library_file.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library'), filename1).replace('\\', '/')
form.library_file.data.save(filePath1)
current_data.library_path = filePath1
if hasattr(form.library_file_depend.data, 'filename'):
filename2 = secure_filename(form.library_file_depend.data.filename)
filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library_depend'), filename2).replace('\\', '/')
form.library_file_depend.data.save(filePath2)
current_data.library_depend_path = filePath2
db.session.commit()
flash(_('App extension has been edited.'))
return redirect(url_for('main.registe_manage_app_extension'))
elif request.method == 'GET':
current_data = AppExpand.query.filter(AppExpand.id == id).first()
form.app_type.data = current_data.type
form.tag_begin.data = current_data.pattern_begin
form.tag_end.data = current_data.pattern_end
form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
form.library_file_description.data = current_data.library_desc
form.db_info_file_path.data = current_data.db_init_path
session['validate_app_type'] = form.app_type.data
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('Edit App Extension'), AppAdmin=AppAdmin, form=form,
                           editTitle=_('Edit App Extension'))
# ---------------------------------------------------------------------------------------
# registe manage app manager setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_manager_setting')
@login_required
def registe_manage_app_manager_setting():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Manager Name'), _('App Name')]
data = {}
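    # Build {manager_name: {'id': ..., 'name': 'app1; app2'}}; the sorted manager
    # names are stored under the 'sort' key for the template to iterate over.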
preData = AppAdmin.query.all()
for p in preData:
managerName = p.name
for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
appName = App.query.filter(App.id == temp.app_id).first().name
if data.get(managerName):
data[managerName]['name'].append(appName)
else:
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = []
data[managerName]['name'].append(appName)
if not data.get(managerName):
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = ''
else:
data[managerName]['name'].sort()
data[managerName]['name'] = '; '.join(data[managerName]['name'])
data['sort'] = list(data.keys())
data['sort'].sort()
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('App Manager List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_manager_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_add():
form = AddAppAdminForm(None)
if form.validate_on_submit():
db.session.add(AppAdmin(id=None, name=form.app_admin_name.data,
password=generate_password_hash(form.app_admin_password.data)))
db.session.commit()
app_admin_id = AppAdmin.query.filter(AppAdmin.name == form.app_admin_name.data).first().id
for app_name in form.app_list.data:
app_id = App.query.filter(App.name == app_name).first().id
db.session.add(AdminToApp(id=None, app_admin_id=app_admin_id, app_id=app_id))
db.session.commit()
flash(_('New app manager has been added.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('Add New App Manager'), AppAdmin=AppAdmin, form=form,
                           addTitle=_('Add New App Manager'))
@bp.route('/registe_manage_app_manager_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = AppAdmin.query.filter(AppAdmin.id == session['current_delete_id']).first()
for removeAdminToApp in AdminToApp.query.filter(AdminToApp.app_admin_id==current_data.id).all():
db.session.delete(removeAdminToApp)
db.session.delete(current_data)
db.session.commit()
flash(_('Record has been deleted.'))
return jsonify({'result':'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Manager Name'), _('App Name')]
data = {}
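    # Same shape as in registe_manage_app_setting: {manager_name: {'id': ..., 'name': 'app1; app2'}}
    # plus the sorted manager names under the 'sort' key.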
preData = AppAdmin.query.all()
for p in preData:
managerName = p.name
for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
appName = App.query.filter(App.id == temp.app_id).first().name
if data.get(managerName):
data[managerName]['name'].append(appName)
else:
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = []
data[managerName]['name'].append(appName)
if not data.get(managerName):
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = ''
else:
data[managerName]['name'].sort()
data[managerName]['name'] = '; '.join(data[managerName]['name'])
data['sort'] = list(data.keys())
data['sort'].sort()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('App Manager List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_manager_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete_select():
flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
@bp.route('/registe_manage_app_manager_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_edit(id):
if session.get('validate_app_admin_name'):
form = AddAppAdminForm(session['validate_app_admin_name'])
else:
form = AddAppAdminForm(None)
if form.validate_on_submit():
old_app_list = session['old_app_list'] if session.get('old_app_list') else []
new_app_list = form.app_list.data
add_app_list = [a for a in new_app_list if a not in old_app_list]
remove_app_list = [a for a in old_app_list if a not in new_app_list]
current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
current_data.name = form.app_admin_name.data
if not form.app_admin_password.data.strip() == '':
current_data.password = generate_password_hash(form.app_admin_password.data)
for a in add_app_list:
add_app_id = App.query.filter(App.name == a).first().id
db.session.add(AdminToApp(id=None, app_admin_id=id, app_id=add_app_id))
for a in remove_app_list:
remove_app_id = App.query.filter(App.name == a).first().id
removeAdminToApp = AdminToApp.query.filter(AdminToApp.app_admin_id==id, AdminToApp.app_id==remove_app_id).first()
db.session.delete(removeAdminToApp)
db.session.commit()
flash(_('App Admin has been edited.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
elif request.method == 'GET':
current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
app_list = [a.app_id for a in AdminToApp.query.filter(AdminToApp.app_admin_id == id)]
app_name_list = [App.query.filter(App.id == a).first().name for a in app_list]
form.app_admin_name.data = current_data.name
form.app_list.data = app_name_list
session['validate_app_admin_name'] = form.app_admin_name.data
session['old_app_list'] = app_name_list
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('Edit App Manager'), AppAdmin=AppAdmin, form=form,
                           editTitle=_('Edit App Manager'))
# ---------------------------------------------------------------------------------------
# registe manage app tenant setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_tenant_setting')
@login_required
def registe_manage_app_tenant_setting():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
data = Tenant.query.order_by(db.asc(Tenant.name)).all()
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('App Tenant List'), App=App,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_tenant_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_add():
form = AddTenantForm(None)
if form.validate_on_submit():
app_id = App.query.filter(App.name == form.app_list.data).first().id
db.session.add(Tenant(id=None, name=form.tenant_name.data,
password=generate_password_hash(form.tenant_password.data),
tenantid=hashlib.md5(form.tenant_name.data.encode(encoding='UTF-8')).hexdigest(),
app_id=app_id))
db.session.commit()
flash(_('New Tenant has been added.'))
return redirect(url_for('main.registe_manage_app_tenant_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('Add New App Tenant'), form=form,
                           addTitle=_('Add New App Tenant'))
@bp.route('/registe_manage_app_tenant_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = Tenant.query.filter(Tenant.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
flash(_('Record has been deleted.'))
return jsonify({'result':'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
data = Tenant.query.order_by(db.asc(Tenant.name)).all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('App Tenant List'), App=App,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_tenant_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete_select():
flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.registe_manage_app_tenant_setting'))
@bp.route('/registe_manage_app_tenant_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_edit(id):
if session.get('validate_app_tenant_name'):
form = AddTenantForm(session['validate_app_tenant_name'])
else:
form = AddTenantForm(None)
if form.validate_on_submit():
current_data = Tenant.query.filter(Tenant.id == id).first()
current_data.name = form.tenant_name.data
if not form.tenant_password.data.strip() == '':
current_data.password = generate_password_hash(form.tenant_password.data)
app_id = App.query.filter(App.name == form.app_list.data).first().id
current_data.app_id = app_id
db.session.commit()
flash(_('App Tenant has been edited.'))
return redirect(url_for('main.registe_manage_app_tenant_setting'))
elif request.method == 'GET':
current_data = Tenant.query.filter(Tenant.id == id).first()
app_name = App.query.filter(App.id == current_data.app_id).first().name
form.tenant_name.data = current_data.name
form.app_list.data = app_name
form.tenant_id.data = current_data.tenantid
session['validate_app_tenant_name'] = form.tenant_name.data
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('Edit App Tenant'), form=form,
                           editTitle=_('Edit App Tenant'))
# ---------------------------------------------------------------------------------------
# app manage change current app
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_set_current_app', methods=['GET', 'POST'])
@login_required
def app_manage_set_current_app():
if request.method == 'POST':
data = request.get_json()
name = data.get('name')
current_data = App.query.filter(App.name == name).first()
if current_data:
session['current_selected_app_id'] = current_data.id
session['current_selected_app_name'] = current_data.name
flash(_('Switch current app success!'))
return jsonify({'result':'success'})
def get_app_name_list():
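    """Return the names of all apps assigned to the current app manager, sorted alphabetically."""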
app_list = [a.app_id for a in AdminToApp.query.filter(
AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
data = [App.query.filter(App.id == a).order_by(db.asc(App.name)).first() for a in app_list]
data.sort(key=lambda a: a.name)
app_name_list = [a.name for a in data]
return app_name_list
def get_current_selected_app_name():
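    """Return the app name currently selected in the session, or None if no app is selected."""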
current_selected_app_name = None
if session.get('current_selected_app_name'):
current_selected_app_name = session['current_selected_app_name']
return current_selected_app_name
# ---------------------------------------------------------------------------------------
# app manage app list
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_app_list')
@login_required
def app_manage_app_list():
isCheck = True
tHead = [_('App Name'), _('App ID'), _('Creator')]
app_list = [ a.app_id for a in AdminToApp.query.filter(
AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
data = [ App.query.filter(App.id == a).order_by(db.asc(App.name)).first() for a in app_list]
data.sort(key=lambda a: a.name)
return render_template('app_manage_app_list.html', title=_('App List'),
tableName=_('App List'), AppAdmin=AppAdmin, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# app manage code configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_function_configure')
@login_required
def app_manage_function_configure():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('app_manage_function_configure.html', title=_('Online Function'),
tableName=_('Function Configure'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_function_configure_test')
@login_required
def app_manage_function_configure_test():
testFunc()
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('app_manage_function_configure.html', title=_('Online Function'),
tableName=_('Function Configure'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
def testFunc():
    # Build a sed script that locates the target <dd> block in the page source and
    # wraps it with the permission-tag template markers, then runs it via git-bash.
    filePath = 'F:/test/main.html'
    pattern = r'x;/<dd>.*API监控.*<\/dd>/{p;q};/<dd>.*<\/dd>/{x;h;d;ta};/<dd>.*/{x;H;ta};{x;h;d};:a'
    tag_begin = '{if.role_APIguanli}'
    tag_end = '{end}'
    args = 'cat -n %s | sed -n "%s" | { eval $(awk \'NR==1{print "a="$1} END {print "b="$1}\');'\
           'sed -e "$a i %s" -e "$b a %s" %s;} > F:/test/test.txt' % (filePath, pattern, tag_begin, tag_end, filePath)
    with open('F:/test/temp.sh', 'w', encoding='utf-8') as shell_file:
        shell_file.write(args)
    exec_path = r"D:\Program Files\Git\git-bash.exe"
    print(args)
    # getstatusoutput expects a single command string, not a list
    (status, output) = subprocess.getstatusoutput('"%s" %s' % (exec_path, 'F:/test/temp.sh'))
    print(status, output)
@bp.route('/get_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_file_path(tag):
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
if tag in ('version2package.json', 'package2function.json'):
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id), tag)
if os.path.isfile(filePath):
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure_html'], app_id), tag)
return jsonify({'result':'success', 'filePath': filePath})
return jsonify({'result': 'fail', 'filePath': False})
@bp.route('/app_manage_init_file/<tag>', methods=['GET', 'POST'])
@login_required
def app_manage_init_file(tag):
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
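# Default tree root record (id/parent/text fields) used to seed a new configuration file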
initJson = [
{
"data": {
"file_path": "",
"item_pattern": ""
},
"id": "Root",
"parent": "#",
"text": "Root"
}
]
if tag in ['version2package.json', 'package2function.json']:
try:
    with open(os.path.join(filePath, tag), 'w') as new_file:
        new_file.write(json.dumps(initJson))
    flash(_('File initialization for %(tag)s succeeded.', tag=tag))
except Exception as e:
    print(e)
    flash(_('File initialization for %(tag)s failed.', tag=tag))
return jsonify({'result':'success'})
@bp.route('/app_manage_save_file', methods=['GET', 'POST'])
@login_required
def app_manage_save_file():
data = request.get_json()
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
tag = data['tag']
new_json = json.loads(data['json'])
print(new_json)
if tag in ['version2package.json', 'package2function.json']:
    try:
        with open(os.path.join(filePath, tag), 'w') as new_file:
            json.dump(new_json, new_file, indent=4)
        flash(_('File save for %(tag)s succeeded.', tag=tag))
    except Exception as e:
        print(e)
        flash(_('File save for %(tag)s failed.', tag=tag))
return jsonify({'result':'success'})
@bp.route('/app_manage_upload_file', methods=['GET', 'POST'])
@login_required
def app_manage_upload_file():
version_to_package_file = request.files['version_to_package_file']
package_to_function_file = request.files['package_to_function_file']
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
version_to_package_file.save(os.path.join(filePath,'version2package.json'))
package_to_function_file.save(os.path.join(filePath, 'package2function.json'))
flash(_('Import success!'))
return jsonify({'result':'success'})
# ---------------------------------------------------------------------------------------
# app manage database configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_database_configure')
@login_required
def app_manage_database_configure():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(db.asc(TenantDb.database)).all()
return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_database_configure_add', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_add():
form = AddTenantDatabaseForm(None)
if form.validate_on_submit():
current_tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
db.session.add(TenantDb(id=None, hostname=form.host_name.data, driver=form.database_driver.data,
username=form.user_name.data,
password=generate_password_hash(form.user_password.data),
database=form.database_name.data, port=form.database_port.data,
aliasname='_'.join([form.database_driver.data, form.database_name.data]),
type=current_type, tenant_id=current_tenant_id, app_id=session['current_selected_app_id']))
db.session.commit()
flash(_('New tenant database has been added.'))
return redirect(url_for('main.app_manage_database_configure'))
elif request.method == 'GET':
form.app_name.data = session['current_selected_app_name']
form.host_name.data = 'localhost'
form.database_port.data = '3306'
form.database_driver.data = 'mysql'
form.user_name.data = 'root'
return render_template('app_manage_database_configure.html', title=_('Tenant Database Configure'),
tableName=_('Add New Tenant Database'), form=form, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           addTitle=_('Add New Tenant Database'))
@bp.route('/app_manage_database_configure_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
flash(_('Record has been deleted.'))
return jsonify({'result':'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
db.asc(TenantDb.username)).all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_database_configure_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete_select():
flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.app_manage_database_configure'))
@bp.route('/app_manage_database_configure_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_edit(id):
if session.get('validate_alias_name'):
form = EditTenantDatabaseForm(session['validate_alias_name'])
else:
form = EditTenantDatabaseForm(None)
if form.validate_on_submit():
current_data = TenantDb.query.filter(TenantDb.id == id).first()
current_data.hostname = form.host_name.data
current_data.driver = form.database_driver.data
current_data.username = form.user_name.data
current_data.database = form.database_name.data
current_data.port = form.database_port.data
current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_data.app_id = session['current_selected_app_id']
if not form.user_password.data.strip() == '':
current_data.password = generate_password_hash(form.user_password.data)
db.session.commit()
flash(_('Tenant Database has been edited.'))
return redirect(url_for('main.app_manage_database_configure'))
elif request.method == 'GET':
current_data = TenantDb.query.filter(TenantDb.id == id).first()
form.app_name.data = session['current_selected_app_name']
form.host_name.data = current_data.hostname
form.database_port.data = current_data.port
form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
form.database_driver.data = current_data.driver
form.database_name.data = current_data.database
form.user_name.data = current_data.username
form.user_password.description = 'In edit mode, leaving this field null means no modification to the current password.'
from RFEM.initModel import *
from RFEM.enums import *
class MemberSetLoad():
def __init__(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
magnitude: float = 0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction (enum): Load Case Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comments
params (dict, optional): Parameters
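        Example (illustrative values; assumes an initialized RFEM client model):
            MemberSetLoad(no=1, load_case_no=1, member_sets='1',
                          load_direction=LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
                          magnitude=5000)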
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
def Force(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
force_eccentricity: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameter
force_eccentricity (bool): Force Eccentricity Option
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_UNIFORM_TOTAL:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
            for LOAD_DISTRIBUTION_TRAPEZOIDAL:
                load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
            for LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
for LOAD_DISTRIBUTION_VARYING_IN_Z:
load_parameter = [[distance, delta_distance, magnitude],...]
params:
{'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
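        Example (a minimal sketch with illustrative values; assumes an initialized client model):
            MemberSetLoad().Force(no=2, load_case_no=1, member_sets='1',
                                  load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL,
                                  load_parameter=[False, False, 1000, 2000, 0.5, 1.0])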
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_parameter == []:
raise Exception("WARNING: Load parameter cannot be empty. Kindly check list inputs completeness and correctness.")
else:
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM" or load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM_TOTAL":
if len(load_parameter) == 1:
clientObject.magnitude = load_parameter[0]
else:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
if len(load_parameter) == 3:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
if len(load_parameter) == 7:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
else:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
if len(load_parameter[0]) != 3:
    print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
if len(load_parameter)==6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
if len(load_parameter)==3:
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
if len(load_parameter[0]) != 3:
    print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING_IN_Z":
if len(load_parameter[0]) != 3:
    print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
#Force Eccentiricity
clientObject.has_force_eccentricity = force_eccentricity
if force_eccentricity == True:
required_keys = ('eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment',
                 'eccentricity_section_middle', 'is_eccentricity_at_end_different_from_start',
                 'eccentricity_y_at_end', 'eccentricity_y_at_start',
                 'eccentricity_z_at_end', 'eccentricity_z_at_start')
if not all(key in params for key in required_keys):
    raise Exception("WARNING: Params does not contain all the necessary parameters. Kindly check dictionary")
params_ecc = {'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
params_ecc.update(params)
if params_ecc['is_eccentricity_at_end_different_from_start'] == False:
clientObject.eccentricity_horizontal_alignment= params_ecc['eccentricity_horizontal_alignment'].name
clientObject.eccentricity_vertical_alignment= params_ecc['eccentricity_vertical_alignment'].name
clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle'].name
clientObject.eccentricity_y_at_end= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_y_at_start= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_z_at_end= params_ecc['eccentricity_z_at_start']
clientObject.eccentricity_z_at_start= params_ecc['eccentricity_z_at_start']
elif params_ecc['is_eccentricity_at_end_different_from_start'] == True:
    clientObject.eccentricity_horizontal_alignment = params_ecc['eccentricity_horizontal_alignment'].name
    clientObject.eccentricity_vertical_alignment = params_ecc['eccentricity_vertical_alignment'].name
    clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle'].name
    clientObject.eccentricity_y_at_end = params_ecc['eccentricity_y_at_end']
    clientObject.eccentricity_y_at_start = params_ecc['eccentricity_y_at_start']
    clientObject.eccentricity_z_at_end = params_ecc['eccentricity_z_at_end']
    clientObject.eccentricity_z_at_start = params_ecc['eccentricity_z_at_start']
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary (eccentricity keys are consumed above)
ecc_keys = ('eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment',
            'eccentricity_section_middle', 'is_eccentricity_at_end_different_from_start',
            'eccentricity_y_at_end', 'eccentricity_y_at_start',
            'eccentricity_z_at_end', 'eccentricity_z_at_start')
for key in params:
    if key not in ecc_keys:
        clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
def Moment(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
load_parameter = magnitude
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
for LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_TAPERED:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
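        Example (illustrative values; assumes an initialized client model):
            MemberSetLoad().Moment(no=3, load_case_no=1, member_sets='1',
                                   load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1,
                                   load_parameter=[True, 500, 0.5])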
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_MOMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM":
if len(load_parameter) != 1:
    raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
if len(load_parameter) != 3:
    raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
if len(load_parameter) != 6:
    raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
if len(load_parameter) != 7:
    raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
if len(load_parameter) != 6:
    raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
if len(load_parameter[0]) != 3:
    print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
if len(load_parameter) != 6:
    raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
if len(load_parameter) != 6:
    raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
if len(load_parameter) != 3:
    raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
if len(load_parameter[0]) != 3:
    print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
def Mass(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
individual_mass_components: bool=False,
mass_components = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
individual_mass_components (bool): Individiual Mass Components Option
mass_components (list): Mass Components
comment (str, optional): Comment
params (dict, optional): Parameters
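        Example (illustrative values; assumes an initialized client model):
            MemberSetLoad().Mass(no=4, load_case_no=1, member_sets='1',
                                 individual_mass_components=True,
                                 mass_components=[100, 100, 100])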
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
clientObject.load_type = MemberSetLoadType.E_TYPE_MASS.name
# Member Load Distribution
clientObject.load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Individual Mass Components
if not isinstance(individual_mass_components, bool):
    raise Exception("WARNING: Type of individual mass components should be bool. Kindly check inputs correctness.")
clientObject.individual_mass_components = individual_mass_components
# Mass magnitude
if individual_mass_components == False:
clientObject.mass_global = mass_components[0]
else:
clientObject.mass_x = mass_components[0]
clientObject.mass_y = mass_components[1]
clientObject.mass_z = mass_components[2]
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
def Temperature(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
                load_parameter = [tb, tt]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                for load_over_total_length: bool= False:
                    load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
                for load_over_total_length: bool= True:
                    load_parameter = [tb1, tb2, tt1, tt2]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [tb1, tb2, tb3, tt1, tt2, tt3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
    # the original try/except around len() never fired; use a real length check
    if len(load_parameter) != 2:
        raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
    clientObject.magnitude_t_b = load_parameter[0]
    clientObject.magnitude_t_t = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
    if not isinstance(load_over_total_length, bool):
        raise Exception("WARNING: Type of load over total length should be bool. Kindly check inputs correctness.")
    if not load_over_total_length and len(load_parameter) != 8:
        raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
    clientObject.magnitude_t_b_1 = load_parameter[0]
    clientObject.magnitude_t_b_2 = load_parameter[1]
    clientObject.magnitude_t_t_1 = load_parameter[2]
    clientObject.magnitude_t_t_2 = load_parameter[3]
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
    if len(load_parameter) != 8:
        raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_t_1 = load_parameter[2]
clientObject.magnitude_t_t_2 = load_parameter[3]
if not isinstance(load_parameter[4], bool):
    raise Exception("WARNING: Type of load_parameter[4] (distance a is relative) should be bool. Kindly check inputs correctness.")
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if not isinstance(load_parameter[5], bool):
    raise Exception("WARNING: Type of load_parameter[5] (distance b is relative) should be bool. Kindly check inputs correctness.")
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
    if len(load_parameter) != 6:
        raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_b_3 = load_parameter[2]
clientObject.magnitude_t_t_1 = load_parameter[3]
clientObject.magnitude_t_t_2 = load_parameter[4]
clientObject.magnitude_t_t_3 = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
    if len(load_parameter[0]) != 4:
        print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = load_parameter[i][2]
mlvlp.magnitude_delta_t = load_parameter[i][3]
mlvlp.magnitude_t_t = load_parameter[i][2]
mlvlp.magnitude_t_b = load_parameter[i][3]
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
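# Hedged usage sketch (illustrative values; assumes a connected clientModel
# and an existing member set '1'):
#   member_set_load.Temperature(
#       no=3, load_case_no=1, member_sets='1',
#       load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL,
#       load_parameter=[18, 20, 25, 30, True, True, 0.1, 0.2],
#       load_over_total_length=False)
#   # tb1, tb2, tt1, tt2, distance_a_is_relative, distance_b_is_relative, a, b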
def TemperatureChange(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
    load_parameter = [delta_t, t_c]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
    for load_over_total_length: bool= False:
        load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_is_relative, distance_b_is_relative, a_distance, b_distance]
    for load_over_total_length: bool= True:
        load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
    load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_is_relative, distance_b_is_relative, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [delta_t_1, delta_t_2, delta_t_3, t_c_1, t_c_2, t_c_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude],...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
# Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE_CHANGE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
    if len(load_parameter) != 2:
        raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
    clientObject.magnitude_delta_t = load_parameter[0]
    clientObject.magnitude_t_c = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
    if not isinstance(load_over_total_length, bool):
        raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
    if not load_over_total_length and len(load_parameter) != 8:
        raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
    clientObject.magnitude_delta_t_1 = load_parameter[0]
    clientObject.magnitude_delta_t_2 = load_parameter[1]
    clientObject.magnitude_t_c_1 = load_parameter[2]
    clientObject.magnitude_t_c_2 = load_parameter[3]
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
from abc import abstractmethod
from .base import OperatorConverter
class ATenPackSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_pack_sequence(Tensor output, Tensor batch_sizes, Tensor? sorted_indices, Tensor? unsorted_indices) -> (Tensor, Tensor, Tensor?, Tensor?)'''
pass
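# Hedged sketch: these classes are abstract schema stubs; a concrete converter
# is assumed to subclass one and implement parse() with the same signature.
# The graph_converter API is not shown in this file, so the body below is
# illustrative only:
#   class ATenPackSequenceConverter(ATenPackSequenceSchema):
#       def parse(self, node, attrs, args, graph_converter):
#           ...  # translate the node into the target graph here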
class ATenAsTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::as_tensor(Tensor(a) data, *, int? dtype=None, Device? device=None) -> (Tensor(a|b))'''
pass
class ATenUpsampleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__upsample(Tensor input, int? size=None, int? scale_factor=None, str mode="nearest", bool? align_corners=None) -> (Tensor)
aten::__upsample.size_list(Tensor input, int[]? size=None, int? scale_factor=None, str mode="nearest", bool? align_corners=None) -> (Tensor)'''
pass
class ATenHspmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hspmm(Tensor mat1, Tensor mat2) -> (Tensor)'''
pass
class ATenValuesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::values(Tensor(a) self) -> (Tensor(a))
aten::_values(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::indices(Tensor(a) self) -> (Tensor(a))
aten::_indices(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenNativeNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::native_norm(Tensor self, Scalar p=2) -> (Tensor)
aten::native_norm.ScalarOpt_dim_dtype(Tensor self, Scalar? p, int[1] dim, bool keepdim, int? dtype) -> (Tensor)'''
pass
class ATenQuantizedMaxPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_max_pool1d(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], int[1] dilation=[1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenToDenseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::to_dense(Tensor self, int? dtype=None) -> (Tensor)'''
pass
class ATenFlattenDenseTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flatten_dense_tensors(Tensor[] tensors) -> (Tensor)'''
pass
class ATenLinalgMatrixRankSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_matrix_rank(Tensor self, float? tol=None, bool hermitian=False) -> (Tensor)
aten::linalg_matrix_rank.tol_tensor(Tensor input, Tensor tol, bool hermitian=False) -> (Tensor)'''
pass
class ATenLinalgTensorinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_tensorinv(Tensor self, int ind=2) -> (Tensor)'''
pass
class ATenLinalgPinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_pinv(Tensor self, float rcond=1.0000000000000001e-15, bool hermitian=False) -> (Tensor)
aten::linalg_pinv.rcond_tensor(Tensor self, Tensor rcond, bool hermitian=False) -> (Tensor)'''
pass
class ATenLinalgCondSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_cond(Tensor self, Scalar? p=None) -> (Tensor)
aten::linalg_cond.p_str(Tensor self, str p) -> (Tensor)'''
pass
class ATenLinalgSvdvalsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_svdvals(Tensor input) -> (Tensor)'''
pass
class ATenLinalgSvdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_svd.U(Tensor self, bool full_matrices=True, *, Tensor(a!) U, Tensor(b!) S, Tensor(c!) Vh) -> (Tensor(a!) U, Tensor(b!) S, Tensor(c!) Vh)
aten::linalg_svd(Tensor self, bool full_matrices=True) -> (Tensor U, Tensor S, Tensor Vh)'''
pass
class ATenInnerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::inner(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLinalgInvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_inv(Tensor self) -> (Tensor)'''
pass
class ATenLinalgEigvalshSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigvalsh(Tensor self, str UPLO="L") -> (Tensor)'''
pass
class ATenLinalgEigvalsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigvals(Tensor self) -> (Tensor)'''
pass
class ATenLinalgCholeskySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_cholesky(Tensor self) -> (Tensor)'''
pass
class ATenFftIfftshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifftshift(Tensor self, int[1]? dim=None) -> (Tensor)'''
pass
class ATenFftFftshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fftshift(Tensor self, int[1]? dim=None) -> (Tensor)'''
pass
class ATenFftIrfftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenFftRfftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenFftIrfft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftRfft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftFft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftIhfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ihfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftHfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_hfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftIrfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftRfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftIfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenSlowConv3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv3d(Tensor self, Tensor weight, int[3] kernel_size, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0]) -> (Tensor)'''
pass
class ATenThnnConvDepthwise2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::thnn_conv_depthwise2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] dilation=[1, 1]) -> (Tensor)'''
pass
class ATenThnnConv2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::thnn_conv2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0]) -> (Tensor)'''
pass
class ATenLogSigmoidSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log_sigmoid(Tensor self) -> (Tensor)'''
pass
class ATenFloatPowerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::float_power.Tensor_Tensor(Tensor self, Tensor exponent) -> (Tensor)
aten::float_power.Scalar(Scalar self, Tensor exponent) -> (Tensor)
aten::float_power.Tensor_Scalar(Tensor self, Scalar exponent) -> (Tensor)'''
pass
class ATenArgsortSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::argsort(Tensor self, int dim=-1, bool descending=False) -> (Tensor)
aten::argsort.dimname(Tensor self, str dim, bool descending=False) -> (Tensor)'''
pass
class ATenMsortSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::msort(Tensor self) -> (Tensor)'''
pass
class ATenNanquantileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nanquantile.scalar(Tensor self, float q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::nanquantile(Tensor self, Tensor q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::nanquantile.new_scalar(Tensor self, float q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)
aten::nanquantile.new(Tensor self, Tensor q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)'''
pass
class ATenQuantileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantile.scalar(Tensor self, float q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::quantile(Tensor self, Tensor q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::quantile.new_scalar(Tensor self, float q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)
aten::quantile.new(Tensor self, Tensor q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)'''
pass
class ATenQrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::qr.Q(Tensor self, bool some=True, *, Tensor(a!) Q, Tensor(b!) R) -> (Tensor(a!) Q, Tensor(b!) R)
aten::qr(Tensor self, bool some=True) -> (Tensor Q, Tensor R)'''
pass
class ATenSvdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::svd.U(Tensor self, bool some=True, bool compute_uv=True, *, Tensor(a!) U, Tensor(b!) S, Tensor(c!) V) -> (Tensor(a!) U, Tensor(b!) S, Tensor(c!) V)
aten::svd(Tensor self, bool some=True, bool compute_uv=True) -> (Tensor U, Tensor S, Tensor V)'''
pass
class ATenCrossEntropyLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cross_entropy_loss(Tensor self, Tensor target, Tensor? weight=None, int reduction=1, int ignore_index=-100) -> (Tensor)'''
pass
class ATenNonzeroNumpySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nonzero_numpy(Tensor self) -> (Tensor[])'''
pass
class ATenTakeAlongDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::take_along_dim(Tensor self, Tensor indices, int? dim=None) -> (Tensor)'''
pass
class ATenScatterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::scatter.src(Tensor self, int dim, Tensor index, Tensor src) -> (Tensor)
aten::scatter.value(Tensor self, int dim, Tensor index, Scalar value) -> (Tensor)
aten::scatter.dimname_src(Tensor self, str dim, Tensor index, Tensor src) -> (Tensor)
aten::scatter.dimname_value(Tensor self, str dim, Tensor index, Scalar value) -> (Tensor)'''
pass
class ATenIndexAddSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_add(Tensor self, int dim, Tensor index, Tensor source) -> (Tensor)
aten::index_add.alpha(Tensor self, int dim, Tensor index, Tensor source, *, Scalar alpha) -> (Tensor)
aten::index_add.dimname(Tensor self, str dim, Tensor index, Tensor source, *, Scalar alpha=1) -> (Tensor)'''
pass
class ATenPutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::put(Tensor self, Tensor index, Tensor source, bool accumulate=False) -> (Tensor)'''
pass
class ATenMaskedScatterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::masked_scatter(Tensor self, Tensor mask, Tensor source) -> (Tensor)'''
pass
class ATenQuantizedRnnReluCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_rnn_relu_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenQuantizedGruCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_gru_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenQuantizedLstmCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_lstm_cell(Tensor input, Tensor[] hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor, Tensor)'''
pass
class ATenRnnReluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_relu.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::rnn_relu.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenRnnTanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_tanh.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::rnn_tanh.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenGruSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gru.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::gru.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenLstmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lstm.input(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor, Tensor)
aten::lstm.data(Tensor data, Tensor batch_sizes, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor, Tensor)'''
pass
class ATenPadPackedSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_pad_packed_sequence(Tensor data, Tensor batch_sizes, bool batch_first, Scalar padding_value, int total_length) -> (Tensor, Tensor)'''
pass
class ATenCombinationsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::combinations(Tensor self, int r=2, bool with_replacement=False) -> (Tensor)'''
pass
class ATenCartesianProdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cartesian_prod(Tensor[] tensors) -> (Tensor)'''
pass
class ATenMeshgridSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::meshgrid(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenMaskedScaleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_masked_scale(Tensor self, Tensor mask, float scale) -> (Tensor)'''
pass
class ATenFakeQuantizePerChannelAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_channel_affine(Tensor self, Tensor scale, Tensor zero_point, int axis, int quant_min, int quant_max) -> (Tensor)'''
pass
class ATenFakeQuantizePerTensorAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_tensor_affine(Tensor self, float scale, int zero_point, int quant_min, int quant_max) -> (Tensor)'''
pass
class ATenCoalesceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::coalesce(Tensor(a) self) -> (Tensor(a))
aten::_coalesce(Tensor self) -> (Tensor)'''
pass
class ATenWeightNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_weight_norm(Tensor v, Tensor g, int dim=0) -> (Tensor)'''
pass
class ATenNormExceptDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::norm_except_dim(Tensor v, int pow=2, int dim=0) -> (Tensor)'''
pass
class ATenWhereSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::where.self(Tensor condition, Tensor self, Tensor other) -> (Tensor)
aten::where.ScalarSelf(Tensor condition, Scalar self, Tensor other) -> (Tensor)
aten::where.ScalarOther(Tensor condition, Tensor self, Scalar other) -> (Tensor)
aten::where.Scalar(Tensor condition, Scalar self, Scalar other) -> (Tensor)
aten::where(Tensor condition) -> (Tensor[])'''
pass
class ATenTypeAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::type_as(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFlipudSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flipud(Tensor self) -> (Tensor)'''
pass
class ATenFliplrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fliplr(Tensor self) -> (Tensor)'''
pass
class ATenOneHotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::one_hot(Tensor self, int num_classes=-1) -> (Tensor)'''
pass
class ATenTileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tile(Tensor self, int[] dims) -> (Tensor)'''
pass
class ATenSumToSizeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sum_to_size(Tensor self, int[] size) -> (Tensor)'''
pass
class ATenIstftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::istft(Tensor self, int n_fft, int? hop_length=None, int? win_length=None, Tensor? window=None, bool center=True, bool normalized=False, bool? onesided=None, int? length=None, bool return_complex=False) -> (Tensor)'''
pass
class ATenStftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::stft(Tensor self, int n_fft, int? hop_length=None, int? win_length=None, Tensor? window=None, bool normalized=False, bool? onesided=None, bool? return_complex=None) -> (Tensor)'''
pass
class ATenDstackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dstack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenHstackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hstack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenDsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::dsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenVsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::vsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::vsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenHsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::hsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenSmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::smm(Tensor self, Tensor mat2) -> (Tensor)'''
pass
class ATenSeluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::selu(Tensor self) -> (Tensor)'''
pass
class ATenRreluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rrelu(Tensor self, Scalar lower=0.125, Scalar upper=0.33333333333333331, bool training=False, Generator? generator=None) -> (Tensor)'''
pass
class ATenRavelSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ravel(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenPinverseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pinverse(Tensor self, float rcond=1.0000000000000001e-15) -> (Tensor)'''
pass
class ATenPinMemorySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pin_memory(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenPixelUnshuffleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pixel_unshuffle(Tensor self, int downscale_factor) -> (Tensor)'''
pass
class ATenPixelShuffleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pixel_shuffle(Tensor self, int upscale_factor) -> (Tensor)'''
pass
class ATenPairwiseDistanceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pairwise_distance(Tensor x1, Tensor x2, float p=2., float eps=9.9999999999999995e-07, bool keepdim=False) -> (Tensor)'''
pass
class ATenMatrixRankSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::matrix_rank.tol(Tensor self, float tol, bool symmetric=False) -> (Tensor)
aten::matrix_rank(Tensor self, bool symmetric=False) -> (Tensor)'''
pass
class ATenKronSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::kron(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenInstanceNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::instance_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool use_input_stats, float momentum, float eps, bool cudnn_enabled) -> (Tensor)'''
pass
class ATenIndexCopySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_copy(Tensor self, int dim, Tensor index, Tensor source) -> (Tensor)
aten::index_copy.dimname(Tensor self, str dim, Tensor index, Tensor source) -> (Tensor)'''
pass
class ATenLdexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ldexp.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenEmbeddingBagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::embedding_bag(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq=False, int mode=0, bool sparse=False, Tensor? per_sample_weights=None, bool include_last_offset=False) -> (Tensor, Tensor, Tensor, Tensor)
aten::embedding_bag.padding_idx(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq, int mode, bool sparse, Tensor? per_sample_weights, bool include_last_offset, int? padding_idx) -> (Tensor, Tensor, Tensor, Tensor)
aten::_embedding_bag(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq=False, int mode=0, bool sparse=False, Tensor? per_sample_weights=None, bool include_last_offset=False, int padding_idx=-1) -> (Tensor, Tensor, Tensor, Tensor)'''
pass
class ATenEinsumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::einsum(str equation, Tensor[] tensors) -> (Tensor)'''
pass
class ATenDiffSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diff(Tensor self, int n=1, int dim=-1, Tensor? prepend=None, Tensor? append=None) -> (Tensor)'''
pass
class ATenDiagflatSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diagflat(Tensor self, int offset=0) -> (Tensor)'''
pass
class ATenDiagEmbedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diag_embed(Tensor self, int offset=0, int dim1=-2, int dim2=-1) -> (Tensor)'''
pass
class ATenCtcLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ctc_loss.IntList(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, int reduction=1, bool zero_infinity=False) -> (Tensor)
aten::ctc_loss.Tensor(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, int reduction=1, bool zero_infinity=False) -> (Tensor)
aten::_ctc_loss(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)'''
pass
class ATenConvolutionModeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_convolution_mode(Tensor input, Tensor weight, Tensor? bias, int[] stride, str padding, int[] dilation, int groups) -> (Tensor)'''
pass
class ATenCpuSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cpu(Tensor(a) self) -> (Tensor(a|b))'''
pass
class ATenBlockDiagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::block_diag(Tensor[] tensors) -> (Tensor)'''
pass
class ATenBroadcastToSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::broadcast_to(Tensor(a) self, int[] size) -> (Tensor(a))'''
pass
class ATenBroadcastTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::broadcast_tensors(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenBatchNormImplIndexSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_batch_norm_impl_index(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps, bool cudnn_enabled) -> (Tensor, Tensor, Tensor, Tensor, int)'''
pass
class ATenBatchNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps, bool cudnn_enabled) -> (Tensor)'''
pass
class ATenAtleast3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_3d(Tensor self) -> (Tensor)
aten::atleast_3d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenAtleast2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_2d(Tensor self) -> (Tensor)
aten::atleast_2d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenAtleast1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_1d(Tensor self) -> (Tensor)
aten::atleast_1d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenDimArangeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_dim_arange(Tensor like, int dim) -> (Tensor)'''
pass
class ATenBatchNormStatsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_stats(Tensor input, float eps) -> (Tensor, Tensor)'''
pass
class ATenCopyFromSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_copy_from(Tensor self, Tensor dst, bool non_blocking=False) -> (Tensor)'''
pass
class ATenAdaptiveMaxPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_max_pool1d(Tensor self, int[1] output_size) -> (Tensor, Tensor)'''
pass
class ATenAdaptiveAvgPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_avg_pool1d(Tensor self, int[1] output_size) -> (Tensor)'''
pass
class ATenCrowIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::crow_indices(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenAvgPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::avg_pool1d(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], bool ceil_mode=False, bool count_include_pad=True) -> (Tensor)'''
pass
class ATenFeatureAlphaDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::feature_alpha_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenBatchNormElemtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_elemt(Tensor input, Tensor? weight, Tensor? bias, Tensor mean, Tensor invstd, float eps) -> (Tensor)'''
pass
class ATenAlphaDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::alpha_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenFeatureDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::feature_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenShapeAsTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_shape_as_tensor(Tensor self) -> (Tensor)'''
pass
class ATenQuantizedRnnTanhCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_rnn_tanh_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenReshapeFromTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_reshape_from_tensor(Tensor self, Tensor shape) -> (Tensor)'''
pass
class ATenSobolEngineDrawSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_sobol_engine_draw(Tensor quasi, int n, Tensor sobolstate, int dimension, int num_generated, int? dtype) -> (Tensor, Tensor)'''
pass
class ATenLinalgQrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_qr.out(Tensor self, str mode="reduced", *, Tensor(a!) Q, Tensor(b!) R) -> (Tensor(a!) Q, Tensor(b!) R)
aten::linalg_qr(Tensor self, str mode="reduced") -> (Tensor Q, Tensor R)'''
pass
class ATenLinalgInvExSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_inv_ex.inverse(Tensor self, *, bool check_errors=False, Tensor(a!) inverse, Tensor(b!) info) -> (Tensor(a!) inverse, Tensor(b!) info)
aten::linalg_inv_ex(Tensor self, *, bool check_errors=False) -> (Tensor inverse, Tensor info)'''
pass
class ATenLinalgEighSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigh.eigvals(Tensor self, str UPLO="L", *, Tensor(a!) eigvals, Tensor(b!) eigvecs) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::linalg_eigh(Tensor self, str UPLO="L") -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenLuSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lu_solve(Tensor self, Tensor LU_data, Tensor LU_pivots) -> (Tensor)'''
pass
class ATenSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::solve.solution(Tensor self, Tensor A, *, Tensor(a!) solution, Tensor(b!) lu) -> (Tensor(a!) solution, Tensor(b!) LU)
aten::solve(Tensor self, Tensor A) -> (Tensor solution, Tensor LU)'''
pass
class ATenCholeskySolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cholesky_solve(Tensor self, Tensor input2, bool upper=False) -> (Tensor)'''
pass
class ATenEigSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::eig.e(Tensor self, bool eigenvectors=False, *, Tensor(a!) e, Tensor(b!) v) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::eig(Tensor self, bool eigenvectors=False) -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenSymeigSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::symeig.e(Tensor self, bool eigenvectors=False, bool upper=True, *, Tensor(a!) e, Tensor(b!) V) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::symeig(Tensor self, bool eigenvectors=False, bool upper=True) -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenChooseQparamsOptimizedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
    # (op schema docstring truncated in the source)
    pass
from __future__ import division
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn import preprocessing
from sklearn import svm
import re
dataVectors = []
#the file train.csv is expected
file = open('train.csv','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
#find the attribute names
attributes = dataVectors[0]
dataVectors = dataVectors[1:]
data=np.array(np.genfromtxt('train.csv',dtype=('S32','S32','S32','S32','S32','S32','S32',int,'S32','S32'),delimiter=',',names=True))
#print(data.shape) #uncomment to check the shape of the loaded array
#let's first convert all ages into days
#this code was meant to convert all ages into days; we found out that approach was not going to work
#dateByDaysVec = []
#for i in range(len(dataVectors)):
# if "year" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(365*num[0])
# elif "month" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(30*num[0])
# elif "week" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(7*num[0])
# elif "day" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(num[0])
# else:
# dateByDaysVec.append(0)
yearsAlive = []
#assign number based on year
#less than a year 0
#every year after is another int
#convert all age data into yearly ints
for i in range(len(dataVectors)):
if "year" in dataVectors[i][7]:
num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
yearsAlive.append(num[0])
data['AgeuponOutcome'][i] = num[0]
else:
yearsAlive.append(0)
data['AgeuponOutcome'][i] = 0
#used to show the age dataskew uncomment to see
#plt.hist(data['AgeuponOutcome'],4)
#plt.show()
#separate age data into 3 distinct categories
idx_age_0=data['AgeuponOutcome']<5
idx_age_1=(data['AgeuponOutcome']>=5) & (data['AgeuponOutcome']<10)
idx_age_2=data['AgeuponOutcome']>=10
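#e.g. an animal recorded as '3 years' gets AgeuponOutcome=3 above and lands in idx_age_0 (<5years)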
#save new data and reopen data with years now as strings instead of ints
np.savetxt('filterPass1.txt',data,fmt="%s",delimiter=',')
data=np.array(np.genfromtxt('filterPass1.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
dataLen = len(dataVectors)
dataVectors = []
file = open('filterPass1.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
dataLen2 = len(dataVectors)
#save new year data as easy to read strings
data['AgeuponOutcome'][idx_age_0]='<5years'
data['AgeuponOutcome'][idx_age_1]='>=5and<10years'
data['AgeuponOutcome'][idx_age_2]='>=10years'
#so separating the animals into 5-year bins could work
#now we have defined two different ways to look at the amount of time the pets have been alive
#decide later what is more appropriate
#next step is to take the animals with no names and assign them "NoName"
#I will also keep track of unnamed pets vs named
listOfAnimalNames = []
unnamedVsNamed = []
for i in range(len(dataVectors)):
if dataVectors[i][1] != '':
listOfAnimalNames.append(dataVectors[i][1])
unnamedVsNamed.append('Named')
else:
listOfAnimalNames.append('NoName')
unnamedVsNamed.append('NoName')
idx_name_0 = data['Name'] != ''
idx_name_1 = data['Name'] == ''
data['Name'][idx_name_0] = "Named"
data['Name'][idx_name_1] = "NoName"
#now that names are taken care of we need to handle the DateTime data
listOfSeasons = []
listOfTimeOfDays = []
#use a simple regular expression to grab distinct parts of the date data
for i in range(len(dataVectors)):
getMonthAndTime = re.findall(r'\d+-(\d+)-\d+ (\d+):\d+:\d+', dataVectors[i][2])
month = int(getMonthAndTime[0][0])
time = int(getMonthAndTime[0][1])
season = ''
timeOfDay = ''
if month >= 3 and month <= 5:
season = 'Spring'
if month >= 6 and month <= 8:
season = 'Summer'
if month >= 9 and month <= 11:
season = 'Fall'
if month == 12:
season = 'Winter'
if month >= 1 and month <= 2:
season = 'Winter'
if time >= 1 and time <= 6:
timeOfDay = 'Morning'
if time >= 7 and time <= 12:
timeOfDay = 'Morning'
if time >= 13 and time <= 18:
timeOfDay = 'Afternoon'
if time >= 19 and time <= 23:
timeOfDay = 'Night'
if time == 0:
timeOfDay = 'Night'
listOfSeasons.append(season)
listOfTimeOfDays.append(timeOfDay)
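#worked example: a DateTime such as '2014-02-12 18:22:00' yields month=2 and time=18,
#which the rules above map to season='Winter' and timeOfDay='Afternoon'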
#save new data with name modified
np.savetxt('filterPass2.txt',data,fmt="%s",delimiter=',')
dataVectors = []
file = open('filterPass2.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
dataLen3 = len(dataVectors)
#get rid of animalID and datetime and add timeOfDay and Seasons
for i in range(dataLen3):
dataVectors[i].pop(2)
dataVectors[i].pop(0)
dataVectors[i].insert(1, listOfSeasons[i])
dataVectors[i].insert(2, listOfTimeOfDays[i])
#save data with new timeOfDay and Seasons attributes
data2 = np.array(dataVectors)
np.savetxt('filterPass3.txt',data2,fmt="%s",delimiter=',')
#generate new data array
data=np.array(np.genfromtxt('filterPass3.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
dataVectors = []
file = open('filterPass3.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
isMixOrNot = []
#determine if an animal is a mix or not
for i in range(len(dataVectors)):
if 'Mix' in data[i][8]:
isMixOrNot.append('Mix')
else:
isMixOrNot.append('Purebred')
for i in range(len(dataVectors)):
data[i][8] = isMixOrNot[i]
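#e.g. a Breed value containing 'Mix' (such as 'Domestic Shorthair Mix') becomes 'Mix'; anything else becomes 'Purebred'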
#np.savetxt('filterPass4.txt',data,fmt="%s",delimiter=',')
#data=np.array(np.genfromtxt('filterPass4.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
#dataVectors = []
#file = open('filterPass4.txt','r')
#for line in file:
# dataVectors.append(line.strip().split(','))
#file.close
mixedColorOrNot = []
for i in range(len(dataVectors)):
if '/' in data[i][9]:
mixedColorOrNot.append('MixedColor')
else:
mixedColorOrNot.append('SolidColor')
for i in range(len(dataVectors)):
data[i][9] = mixedColorOrNot[i]
#get rid of the rest of the whitespace in the data so it can be used with Association Rules
idx_subtype_0 = data['OutcomeSubtype'] == ''
idx_subtype_1 = data['OutcomeSubtype'] == 'At Vet'
idx_subtype_2 = data['OutcomeSubtype'] == 'Foster'
idx_subtype_3 = data['OutcomeSubtype'] == 'In Foster'
idx_subtype_4 = data['OutcomeSubtype'] == 'In Kennel'
idx_subtype_5 = data['OutcomeSubtype'] == 'In Surgery'
idx_subtype_6 = data['OutcomeSubtype'] == 'Rabies Risk'
data['OutcomeSubtype'][idx_subtype_0] = "NoSubtype"
data['OutcomeSubtype'][idx_subtype_1] = "AtVet"
data['OutcomeSubtype'][idx_subtype_2] = "Foster"
data['OutcomeSubtype'][idx_subtype_3] = "Foster"
data['OutcomeSubtype'][idx_subtype_4] = "Kennel"
data['OutcomeSubtype'][idx_subtype_5] = "Surgery"
data['OutcomeSubtype'][idx_subtype_6] = "RabiesRisk"
idx_sex_0 = data['SexuponOutcome'] == ''
idx_sex_1 = data['SexuponOutcome'] == 'Intact Male'
idx_sex_2 = data['SexuponOutcome'] == 'Intact Female'
idx_sex_3 = data['SexuponOutcome'] == 'Spayed Female'
idx_sex_4 = data['SexuponOutcome'] == 'Neutered Male'
data['SexuponOutcome'][idx_sex_1] = "IntactMale"
data['SexuponOutcome'][idx_sex_2] = "IntactFemale"
data['SexuponOutcome'][idx_sex_3] = "SpayedFemale"
data['SexuponOutcome'][idx_sex_4] = "NeuteredMale"
data['SexuponOutcome'][idx_sex_0] = "Unknown"
np.savetxt('filterPass4.txt',data,fmt="%s",delimiter=',')
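#at this point every attribute is a whitespace-free category, e.g. a row might read:
#Named,Winter,Afternoon,Adoption,NoSubtype,Dog,NeuteredMale,<5years,Mix,MixedColor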
#dataVectors = []
#file = open('filterPass5.txt','r')
#for line in file:
# dataVectors.append(line.strip().split(','))
#file.close()
#newData = np.array(dataVectors)
#np.savetxt('filterPass6.txt',newData,fmt="%s",delimiter=',')
#listOfUniqueElements = [[] for i in range(10)]
#for i in range(len(dataVectors)):
# for k in range(len(dataVectors[i])):
# if dataVectors[i][k] not in listOfUniqueElements[k]:
# listOfUniqueElements[k].append(dataVectors[i][k])
#listOfNumericalElements = [[] for i in range(10)]
#for i in range(len(dataVectors)):
# for k in range(len(dataVectors[i])):
# listOfNumericalElements[k].append(listOfUniqueElements[k].index(dataVectors[i][k]))
#dataVectorsTest = []
#file = open('filterPass6.txt','r')
#for line in file:
# dataVectorsTest.append(line.strip().split(','))
#file.close()
#listOfNumericalElementsTest = [[] for i in range(10)]
#for i in range(len(dataVectorsTest)):
# for k in range(len(dataVectorsTest[i])):
# listOfNumericalElementsTest[k].append(listOfUniqueElements[k].index(dataVectorsTest[i][k]))
#f = open('numericalDataTrain.txt', 'w')
#for i in range(len(listOfNumericalElements[0])):
# for k in range(len(listOfNumericalElements)):
# f.write(str(listOfNumericalElements[k][i]))
# if k!= len(listOfNumericalElements) - 1:
# f.write(',')
# f.write('\n')
#f.close()
#f = open('numericalDataTest.txt', 'w')
#for i in range(len(listOfNumericalElementsTest[0])):
# for k in range(len(listOfNumericalElementsTest)):
# f.write(str(listOfNumericalElementsTest[k][i]))
# if k!= len(listOfNumericalElementsTest) - 1:
# f.write(',')
# f.write('\n')
#f.close()
#everything below this point was the code used to produce the bar graphs shown in the presentation
#there was a lot of tedious, copy-pasted probability calculation in it
#all of the code is kept below so you can see it; just uncomment it if you wish to run it yourself
#mixDogsAdopted = 0
#mixDogsDied = 0
#mixDogsTransfered = 0
#mixDogsReturnedToOwners = 0
#mixDogsEuthanized = 0
#purebredDogsAdopted = 0
#purebredDogsDied = 0
#purebredDogsTransfered = 0
#purebredDogsReturnedToOwners = 0
#purebredDogsEuthanized = 0
#mixCatsAdopted = 0
#mixCatsDied = 0
#mixCatsTransfered = 0
#mixCatsReturnedToOwners = 0
#mixCatsEuthanized = 0
#purebredCatsAdopted = 0
#purebredCatsDied = 0
#purebredCatsTransfered = 0
#purebredCatsReturnedToOwners = 0
#purebredCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][8] == 'Mix':
# mixDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][8] == 'Mix':
# mixDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][8] == 'Mix':
# mixDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Mix':
# mixDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][8] == 'Mix':
# mixDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][8] == 'Purebred':
# purebredDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][8] == 'Purebred':
# purebredDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][8] == 'Purebred':
# purebredDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Purebred':
# purebredDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][8] == 'Purebred':
# purebredDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][8] == 'Mix':
# mixCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][8] == 'Mix':
# mixCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][8] == 'Mix':
# mixCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Mix':
# mixCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][8] == 'Mix':
# mixCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][8] == 'Purebred':
# purebredCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][8] == 'Purebred':
# purebredCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][8] == 'Purebred':
# purebredCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Purebred':
# purebredCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][8] == 'Purebred':
# purebredCatsEuthanized += 1
#nummixDogs = 0
#numpurebredDogs = 0
#nummixCats = 0
#numpurebredCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][8] == 'Mix':
# nummixDogs += 1
# if data[i][5] == 'Dog' and data[i][8] == 'Purebred':
# numpurebredDogs += 1
# if data[i][5] == 'Cat' and data[i][8] == 'Mix':
# nummixCats += 1
# if data[i][5] == 'Cat' and data[i][8] == 'Purebred':
# numpurebredCats += 1
#percentagemixDogsAdopted = mixDogsAdopted/nummixDogs*100
#percentagemixDogsDied = mixDogsDied/nummixDogs*100
#percentagemixDogsTransfered = mixDogsTransfered/nummixDogs*100
#percentagemixDogsReturnToOwners = mixDogsReturnedToOwners/nummixDogs*100
#percentagemixDogsEuthanized = mixDogsEuthanized/nummixDogs*100
#percentagemixDogsOutcomes = [percentagemixDogsAdopted, percentagemixDogsDied, percentagemixDogsTransfered, percentagemixDogsReturnToOwners, percentagemixDogsEuthanized]
#percentagepurebredDogsAdopted = purebredDogsAdopted/numpurebredDogs*100
#percentagepurebredDogsDied = purebredDogsDied/numpurebredDogs*100
#percentagepurebredDogsTransfered = purebredDogsTransfered/numpurebredDogs*100
#percentagepurebredDogsReturnToOwners = purebredDogsReturnedToOwners/numpurebredDogs*100
#percentagepurebredDogsEuthanized = purebredDogsEuthanized/numpurebredDogs*100
#percentagepurebredDogsOutcomes = [percentagepurebredDogsAdopted, percentagepurebredDogsDied, percentagepurebredDogsTransfered, percentagepurebredDogsReturnToOwners, percentagepurebredDogsEuthanized]
#percentagemixCatsAdopted = mixCatsAdopted/nummixCats*100
#percentagemixCatsDied = mixCatsDied/nummixCats*100
#percentagemixCatsTransfered = mixCatsTransfered/nummixCats*100
#percentagemixCatsReturnToOwners = mixCatsReturnedToOwners/nummixCats*100
#percentagemixCatsEuthanized = mixCatsEuthanized/nummixCats*100
#percentagemixCatsOutcomes = [percentagemixCatsAdopted, percentagemixCatsDied, percentagemixCatsTransfered, percentagemixCatsReturnToOwners, percentagemixCatsEuthanized]
#percentagepurebredCatsAdopted = purebredCatsAdopted/numpurebredCats*100
#percentagepurebredCatsDied = purebredCatsDied/numpurebredCats*100
#percentagepurebredCatsTransfered = purebredCatsTransfered/numpurebredCats*100
#percentagepurebredCatsReturnToOwners = purebredCatsReturnedToOwners/numpurebredCats*100
#percentagepurebredCatsEuthanized = purebredCatsEuthanized/numpurebredCats*100
#percentagepurebredCatsOutcomes = [percentagepurebredCatsAdopted, percentagepurebredCatsDied, percentagepurebredCatsTransfered, percentagepurebredCatsReturnToOwners, percentagepurebredCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagemixDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagepurebredDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Purebred Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagemixCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagepurebredCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Purebred Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#youngDogsAdopted = 0
#youngDogsDied = 0
#youngDogsTransfered = 0
#youngDogsReturnedToOwners = 0
#youngDogsEuthanized = 0
#middleAgedDogsAdopted = 0
#middleAgedDogsDied = 0
#middleAgedDogsTransfered = 0
#middleAgedDogsReturnedToOwners = 0
#middleAgedDogsEuthanized = 0
#oldDogsAdopted = 0
#oldDogsDied = 0
#oldDogsTransfered = 0
#oldDogsReturnedToOwners = 0
#oldDogsEuthanized = 0
#######################################
#youngCatsAdopted = 0
#youngCatsDied = 0
#youngCatsTransfered = 0
#youngCatsReturnedToOwners = 0
#youngCatsEuthanized = 0
#middleAgedCatsAdopted = 0
#middleAgedCatsDied = 0
#middleAgedCatsTransfered = 0
#middleAgedCatsReturnedToOwners = 0
#middleAgedCatsEuthanized = 0
#oldCatsAdopted = 0
#oldCatsDied = 0
#oldCatsTransfered = 0
#oldCatsReturnedToOwners = 0
#oldCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '<5years':
# youngDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '<5years':
# youngDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '<5years':
# youngDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '<5years':
# youngDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '<5years':
# youngDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '>=5and<10years':
# middleAgedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '>=5and<10years':
# middleAgedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '>=5and<10years':
# middleAgedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=5and<10years':
# middleAgedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '>=5and<10years':
# middleAgedDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '>=10years':
# oldDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '>=10years':
# oldDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '>=10years':
# oldDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=10years':
# oldDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '>=10years':
# oldDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '<5years':
# youngCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '<5years':
# youngCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '<5years':
# youngCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '<5years':
# youngCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '<5years':
# youngCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '>=5and<10years':
# middleAgedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '>=5and<10years':
# middleAgedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '>=5and<10years':
# middleAgedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=5and<10years':
# middleAgedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '>=5and<10years':
# middleAgedCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '>=10years':
# oldCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '>=10years':
# oldCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '>=10years':
# oldCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=10years':
# oldCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '>=10years':
# oldCatsEuthanized += 1
#numOfDogs = np.sum(data['AnimalType'] == 'Dog')
#numOfCats = np.sum(data['AnimalType'] == 'Cat')
#numAdopted = np.sum(data['OutcomeType'] == 'Adoption')
#numDied = np.sum(data['OutcomeType'] == 'Died')
#numEuthanized = np.sum(data['OutcomeType'] == 'Euthanasia')
#numTransfered = np.sum(data['OutcomeType'] == 'Transfer')
#numReturned = np.sum(data['OutcomeType'] == 'Return_to_owner')
#numYoungDogs = 0
#numMiddleDogs = 0
#numOldDogs = 0
#numYoungCats = 0
#numMiddleCats = 0
#numOldCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][7] == '<5years':
# numYoungDogs += 1
# if data[i][5] == 'Dog' and data[i][7] == '>=5and<10years':
# numMiddleDogs += 1
# if data[i][5] == 'Dog' and data[i][7] == '>=10years':
# numOldDogs += 1
# if data[i][5] == 'Cat' and data[i][7] == '<5years':
# numYoungCats += 1
# if data[i][5] == 'Cat' and data[i][7] == '>=5and<10years':
# numMiddleCats += 1
# if data[i][5] == 'Cat' and data[i][7] == '>=10years':
# numOldCats += 1
#percentageYoungDogsAdopted = youngDogsAdopted/numYoungDogs*100
#percentageYoungDogsDied = youngDogsDied/numYoungDogs*100
#percentageYoungDogsTransfered = youngDogsTransfered/numYoungDogs*100
#percentageYoungDogsReturnToOwners = youngDogsReturnedToOwners/numYoungDogs*100
#percentageYoungDogsEuthanized = youngDogsEuthanized/numYoungDogs*100
#percentageYoungDogsOutcomes = [percentageYoungDogsAdopted, percentageYoungDogsDied, percentageYoungDogsTransfered, percentageYoungDogsReturnToOwners, percentageYoungDogsEuthanized]
#percentageMiddleDogsAdopted = middleAgedDogsAdopted/numMiddleDogs*100
#percentageMiddleDogsDied = middleAgedDogsDied/numMiddleDogs*100
#percentageMiddleDogsTransfered = middleAgedDogsTransfered/numMiddleDogs*100
#percentageMiddleDogsReturnToOwners = middleAgedDogsReturnedToOwners/numMiddleDogs*100
#percentageMiddleDogsEuthanized = middleAgedDogsEuthanized/numMiddleDogs*100
#percentageMiddleDogsOutcomes = [percentageMiddleDogsAdopted, percentageMiddleDogsDied, percentageMiddleDogsTransfered, percentageMiddleDogsReturnToOwners, percentageMiddleDogsEuthanized]
#percentageOldDogsAdopted = oldDogsAdopted/numOldDogs*100
#percentageOldDogsDied = oldDogsDied/numOldDogs*100
#percentageOldDogsTransfered = oldDogsTransfered/numOldDogs*100
#percentageOldDogsReturnToOwners = oldDogsReturnedToOwners/numOldDogs*100
#percentageOldDogsEuthanized = oldDogsEuthanized/numOldDogs*100
#percentageOldDogsOutcomes = [percentageOldDogsAdopted, percentageOldDogsDied, percentageOldDogsTransfered, percentageOldDogsReturnToOwners, percentageOldDogsEuthanized]
#percentageYoungCatsAdopted = youngCatsAdopted/numYoungCats*100
#percentageYoungCatsDied = youngCatsDied/numYoungCats*100
#percentageYoungCatsTransfered = youngCatsTransfered/numYoungCats*100
#percentageYoungCatsReturnToOwners = youngCatsReturnedToOwners/numYoungCats*100
#percentageYoungCatsEuthanized = youngCatsEuthanized/numYoungCats*100
#percentageYoungCatsOutcomes = [percentageYoungCatsAdopted, percentageYoungCatsDied, percentageYoungCatsTransfered, percentageYoungCatsReturnToOwners, percentageYoungCatsEuthanized]
#percentageMiddleCatsAdopted = middleAgedCatsAdopted/numMiddleCats*100
#percentageMiddleCatsDied = middleAgedCatsDied/numMiddleCats*100
#percentageMiddleCatsTransfered = middleAgedCatsTransfered/numMiddleCats*100
#percentageMiddleCatsReturnToOwners = middleAgedCatsReturnedToOwners/numMiddleCats*100
#percentageMiddleCatsEuthanized = middleAgedCatsEuthanized/numMiddleCats*100
#percentageMiddleCatsOutcomes = [percentageMiddleCatsAdopted, percentageMiddleCatsDied, percentageMiddleCatsTransfered, percentageMiddleCatsReturnToOwners, percentageMiddleCatsEuthanized]
#percentageOldCatsAdopted = oldCatsAdopted/numOldCats*100
#percentageOldCatsDied = oldCatsDied/numOldCats*100
#percentageOldCatsTransfered = oldCatsTransfered/numOldCats*100
#percentageOldCatsReturnToOwners = oldCatsReturnedToOwners/numOldCats*100
#percentageOldCatsEuthanized = oldCatsEuthanized/numOldCats*100
#percentageOldCatsOutcomes = [percentageOldCatsAdopted, percentageOldCatsDied, percentageOldCatsTransfered, percentageOldCatsReturnToOwners, percentageOldCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageYoungDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Young Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMiddleDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Middle Aged Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageOldDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Old Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageYoungCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Young Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMiddleCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Middle Aged Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageOldCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Old Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transferred','Returned', 'Euthanized'))
#plt.show()
#namedDogsAdopted = 0
#namedDogsDied = 0
#namedDogsTransfered = 0
#namedDogsReturnedToOwners = 0
#namedDogsEuthanized = 0
#unNamedDogsAdopted = 0
#unNamedDogsDied = 0
#unNamedDogsTransfered = 0
#unNamedDogsReturnedToOwners = 0
#unNamedDogsEuthanized = 0
#namedCatsAdopted = 0
#namedCatsDied = 0
#namedCatsTransfered = 0
#namedCatsReturnedToOwners = 0
#namedCatsEuthanized = 0
#unNamedCatsAdopted = 0
#unNamedCatsDied = 0
#unNamedCatsTransfered = 0
#unNamedCatsReturnedToOwners = 0
#unNamedCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][0] == 'Named':
# namedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][0] == 'Named':
# namedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][0] == 'Named':
# namedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][0] == 'Named':
# namedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][0] == 'Named':
# namedDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][0] == 'NoName':
# unNamedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][0] == 'NoName':
# unNamedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][0] == 'NoName':
# unNamedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][0] == 'NoName':
# unNamedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][0] == 'NoName':
# unNamedDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][0] == 'Named':
# namedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][0] == 'Named':
# namedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][0] == 'Named':
# namedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][0] == 'Named':
# namedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][0] == 'Named':
# namedCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][0] == 'NoName':
# unNamedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][0] == 'NoName':
# unNamedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][0] == 'NoName':
# unNamedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][0] == 'NoName':
# unNamedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][0] == 'NoName':
# unNamedCatsEuthanized += 1

from cep_price_console.utils.utils import is_path_exists_or_creatable, creation_date
from cep_price_console.db_management.server_utils import mysql_login_required
from cep_price_console.utils.log_utils import debug, CustomAdapter
from cep_price_console.utils.excel_utils import Workbook
import cep_price_console.db_management.server_utils as server_utils
from cep_price_console.utils import config
from sqlalchemy.schema import CreateSchema
from sqlalchemy.sql import text
# from sqlalchemy.ext.declarative import DeferredReflection
# noinspection PyUnresolvedReferences
from sqlalchemy import exc, and_, select, or_, func
import importlib
import logging
import datetime
import os
import csv
import textwrap
reflected = False
creation_module = None
@debug(lvl=logging.DEBUG, prefix='')
def get_creation_module():
    global creation_module
    if creation_module is None:
        # Drop any tables already registered on the shared metadata so the
        # freshly written ARW_PRF_Creation module can redefine them cleanly.
        for table in list(server_utils.mysql_base.metadata.tables.keys()):
            server_utils.mysql_base.metadata.remove(server_utils.mysql_base.metadata.tables[table])
        creation_module = importlib.import_module("cep_price_console.db_management.ARW_PRF_Creation")
    return creation_module
class ArwPrfImporter(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.DEBUG, prefix='')
@mysql_login_required
def __init__(self, relative_filename):
self.relative_filename = relative_filename
self.wb_cls = Workbook(relative_filename)
self.session = server_utils.mysql_session_maker()
@debug(lvl=logging.DEBUG)
def investigate_arw_prf_xl(self):
for sheet_name in self.wb_cls.ws_lst:
prf_obj = self.ws_format_check(sheet_name)
if prf_obj is not None:
self.field_instantiation(prf_obj)
self.wb_cls.wb.unload_sheet(sheet_name)
@debug(lvl=logging.DEBUG)
def ws_format_check(self, sheet_name):
# PrimaryReportFile.clear_dict()
formatting_error = False
tbl_init_dict = {}
self.wb_cls.ws_sel = sheet_name
for col in range(1, self.wb_cls.col_count + 1):
col_dict = dict(
arw_or_static=None,
table_name=None,
filepath_or_master_table_name=None,
)
            # Table-level loop.
            # Row 1 of every sheet holds Y/S/N values signifying whether the column
            # should be considered for table import; only columns marked 'Y' or 'S'
            # are imported.
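            # Illustrative (hypothetical) header for one imported column,
            # mirroring the checks below:
            #   Row 1: 'Y'              -> import this column from an ARW CSV export
            #   Row 2: 'inv_master.csv' -> CSV file under arw_export_dir ('N/A' if none)
            #   Row 3: 'inv_master'     -> base table name for the generated tables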
for row in range(1, 4):
cell_val = self.wb_cls.fetch_value(row, col).formatted_value
try:
cell_val = str(cell_val).strip()
except ValueError:
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Value not a string: {3}"
.format(sheet_name, col, str(row), cell_val))
else:
if row == 1:
if cell_val in ('Y', 'S', 'N', 'MySQL File?'):
col_dict['arw_or_static'] = cell_val
else:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, First row value not "
"'Y', 'S', 'N' or 'MySQL File?': {3}".format(sheet_name, col,
row, cell_val))
break
elif row == 2:
                        if self.wb_cls.fetch_value(1, col).formatted_value != 'S':
                            if cell_val.strip() != "N/A":
if cell_val[-4:].upper() == ".CSV":
fileroot = config.config["directory"]["arw_export_dir"]
filepath = os.path.join(fileroot, cell_val)
ArwPrfImporter.logger.log(logging.DEBUG, "filepath: {0}".format(filepath))
if not is_path_exists_or_creatable(filepath):
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Invalid "
"filepath: {3}".format(sheet_name, col, row,
cell_val))
break
else:
col_dict['filepath_or_master_table_name'] = filepath
else:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, "
"Second row value must be a filepath or "
"'N/A': {3}".format(sheet_name, col, row, cell_val))
break
elif cell_val.strip() == "N/A":
col_dict['filepath_or_master_table_name'] = cell_val
elif self.wb_cls.fetch_value(1, col).formatted_value == 'S':
col_dict['filepath_or_master_table_name'] = cell_val
elif row == 3:
# table_name = None
ArwPrfImporter.logger.log(logging.NOTSET,
"Sheet Name: {0}, Column: {1}, Row: {2}, "
"ARW Column List: {3}, Cell Value: {4}"
.format(sheet_name, col, row, arw_col_list.get(str(col)), cell_val))
if col <= 22:
                            if arw_col_list.get(str(col)) != cell_val:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Column Ordering "
"Error: {3}".format(sheet_name, col, row, cell_val))
break
elif arw_col_list.get(str(col)) == cell_val:
col_dict['table_name'] = cell_val
else:
col_dict['table_name'] = cell_val
if formatting_error:
break
# ArwPrfImporter.logger.log(logging.NOTSET, "Sheet Name: {0}, Column: {1}".format(sheet_name, col))
# for str_key in col_dict.keys():
# str_value = col_dict.get(str_key)
# ArwPrfImporter.logger.log(logging.DEBUG, "Key: {0}, Value: {1}".format(str_key, str_value))
if col > 22:
tbl_init_dict[str(col)] = col_dict
if not formatting_error:
prf_obj = PrimaryReportFile(self.session, sheet_name)
for col_key in sorted(tbl_init_dict.keys(), key=lambda x: int(x)):
col_value = tbl_init_dict.get(col_key)
ArwPrfImporter.logger.log(logging.NOTSET, "Key: {0}, Value: {1}".format(col_key, col_value.values()))
prf_obj.tbl_init_dict = tbl_init_dict
self.table_instantiation(prf_obj)
return prf_obj
else:
return None
# self.wb_cls.wb.unload_sheet(sheet_name)
@debug(lvl=logging.DEBUG)
def table_instantiation(self, prf_obj):
for col in sorted(prf_obj.tbl_init_dict.keys(), key=lambda x: int(x)):
col_dict = prf_obj.tbl_init_dict.get(col)
if col_dict.get('arw_or_static') == 'Y':
current_table = CurrentTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_01_current",
filepath=col_dict.get('filepath_or_master_table_name'))
prf_obj.current_tbl_dict[col] = current_table
archive_table = ArchiveTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_02_archive",
filepath=col_dict.get('filepath_or_master_table_name'))
prf_obj.archive_tbl_dict[col] = archive_table
elif col_dict.get('arw_or_static') == 'S':
static_table = StaticTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_01_static",
master_table_name=col_dict.get('filepath_or_master_table_name'))
prf_obj.static_tbl_dict[col] = static_table
@debug(lvl=logging.DEBUG)
def field_instantiation(self, prf_obj):
self.wb_cls.ws_sel = prf_obj.sheetname
col_num_list = list(prf_obj.current_tbl_dict.keys()) + list(prf_obj.archive_tbl_dict.keys()) + list(
prf_obj.static_tbl_dict.keys())
col_num_list = [int(x) for x in list(set(col_num_list))]
# print(col_num_list)
for row in range(4, self.wb_cls.row_count + 1):
try:
new_field = Field(
arw_name=self.wb_cls.fetch_value(row, "A").formatted_value,
logical_field=self.wb_cls.fetch_value(row, "B").formatted_value,
tag=self.wb_cls.fetch_value(row, "C").formatted_value,
length=self.wb_cls.fetch_value(row, "D").formatted_value,
nested=self.wb_cls.fetch_value(row, "E").formatted_value,
desc=self.wb_cls.fetch_value(row, "F").formatted_value,
column_name=self.wb_cls.fetch_value(row, "H").formatted_value,
data_type=self.wb_cls.fetch_value(row, "I").formatted_value,
fill=self.wb_cls.fetch_value(row, "J").formatted_value,
primary_key=self.wb_cls.fetch_value(row, "K").formatted_value,
nullable=self.wb_cls.fetch_value(row, "L").formatted_value,
unique=self.wb_cls.fetch_value(row, "M").formatted_value,
index=self.wb_cls.fetch_value(row, "N").formatted_value,
binary_col=self.wb_cls.fetch_value(row, "O").formatted_value,
auto_incremental=self.wb_cls.fetch_value(row, "P").formatted_value,
generated=self.wb_cls.fetch_value(row, "Q").formatted_value,
static_key=self.wb_cls.fetch_value(row, "R").formatted_value,
dflt_exp=self.wb_cls.fetch_value(row, "U").raw_raw_val,
notes=self.wb_cls.fetch_value(row, "A").formatted_value,
)
except ValueError as err:
if not err.args:
err.args = ('',)
err.args = ("Sheet Name: {0}, Row: {1}"
.format(prf_obj.sheetname,
row),
) + err.args
ArwPrfImporter.logger.error(err.args)
else:
for col in sorted(col_num_list):
try:
order = int(self.wb_cls.fetch_value(row, col).formatted_value)
except ValueError:
ArwPrfImporter.logger.log(
logging.DEBUG, "Value is not an integer. Field not appended to any dictionary.")
else:
current_tbl_obj = prf_obj.current_tbl_dict.get(str(col))
if current_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, current_tbl_obj.table_name))
current_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}. Current Table Dictionary. Get returned 'None'".format(col))
archive_tbl_obj = prf_obj.archive_tbl_dict.get(str(col))
if archive_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, archive_tbl_obj.table_name))
archive_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}. Archive Table Dictionary. Get returned 'None'".format(col))
static_tbl_obj = prf_obj.static_tbl_dict.get(str(col))
if static_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, static_tbl_obj.table_name))
static_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Row: {1}, Column: {0}. Static Table Dictionary. Get returned 'None'".format(col, row))
tbl_obj_lst = \
list(prf_obj.current_tbl_dict.values()) + \
list(prf_obj.archive_tbl_dict.values()) + \
list(prf_obj.static_tbl_dict.values())
for tbl_obj in tbl_obj_lst:
tbl_obj.post_field_instantiation()
# self.wb_cls.wb.unload_sheet(prf_obj.sheetname)
@debug(lvl=logging.DEBUG)
def write_module_file(self, creation=False, mapping=False):
if bool(PrimaryReportFile.prf_dict.values()):
filename = None
            if sum([creation, mapping]) != 1:
                raise ValueError("Exactly one of 'creation' or 'mapping' must be True.")
elif creation:
filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Creation.py"
with filename.open("w") as module_file:
print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
print("from sqlalchemy import Column, Table, func", file=module_file)
print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
print("from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time",
file=module_file)
print("from sqlalchemy.dialects.mysql import LONGTEXT", file=module_file)
print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
elif mapping:
filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Mapping.py"
with filename.open("w") as module_file:
print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
print("from sqlalchemy import Table, func", file=module_file)
print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
with filename.open("a") as module_file:
filename_statement = "Workbook Filename: {0}\n".format(self.wb_cls.xl_fullpath_pretty)
max_length = 110
fmt_string = "# " + "\n# ".join([filename_statement[i:i + max_length] for i in
range(0, len(filename_statement), max_length)])
print(fmt_string, file=module_file)
print("# Timestamp: {0}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
file=module_file)
print("\n", file=module_file)
print("class InformReflection(DeferredReflection, server_utils.mysql_base):", file=module_file)
print(" __abstract__ = True\n\n", file=module_file)
for prf_obj in PrimaryReportFile.prf_dict.values():
ArwPrfImporter.logger.log(logging.NOTSET, "Primary Report File: {0}".
format(prf_obj.sheetname))
tbl_obj_lst = \
list(prf_obj.current_tbl_dict.values()) + \
list(prf_obj.archive_tbl_dict.values()) + \
list(prf_obj.static_tbl_dict.values())
for tbl_obj in sorted(tbl_obj_lst, key=lambda x: x.table_name):
ArwPrfImporter.logger.log(logging.NOTSET, "Tablename: {0}".format(tbl_obj.table_name))
if creation:
print(tbl_obj.creation_stmt, file=module_file)
elif mapping:
print(tbl_obj.mapping_stmt, file=module_file)
elif not bool(PrimaryReportFile.prf_dict.values()):
ArwPrfImporter.logger.error("Primary Report File list empty.")
self.investigate_arw_prf_xl()
self.write_module_file(creation, mapping)
@debug(lvl=logging.DEBUG)
def create_schemas(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.create_if_not_exists()
@debug(lvl=logging.DEBUG)
def drop_and_create_all_tables(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.drop_and_create_tables()
@debug(lvl=logging.DEBUG)
def scheduled_script(self):
        if hasattr(self, 'session'):
if bool(PrimaryReportFile.prf_dict.values()):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.update_schema()
schema_create_if_not_exists('pythontest')
self.fill_prod_uom()
elif not bool(PrimaryReportFile.prf_dict.values()):
ArwPrfImporter.logger.error("Primary Report File list empty.")
self.investigate_arw_prf_xl()
self.scheduled_script()
@debug(lvl=logging.DEBUG, prefix='')
def fill_prod_uom(self):
import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
base_uom_update = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc == "1"
).values(
Base_UOM_Factor=ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
Base_UOM_Qty=1
)
server_utils.mysql_engine.execute(base_uom_update)
self.session.commit()
# noinspection PyPep8,PyComparisonWithNone
no_base_uom = self.session.query(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.ID).filter(
and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))
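        # Iteratively propagate base-UOM factors: each pass joins unresolved
        # rows to rows that already carry Base_UOM_Factor/Base_UOM_Qty via
        # Prod_Num and Of_UOM, multiplying quantities down the conversion
        # chain until no unresolved rows remain.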
while no_base_uom.count() > 0:
# noinspection PyPep8,PyComparisonWithNone
has_base_uom = \
select([ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty,
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc]) \
.where(and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.isnot(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.isnot(None))) \
.distinct() \
.alias("has_base_uom")
# for _ in server_utils.mysql_engine.execute(has_base_uom):
# ArwPrfImporter.logger.log(logging.DEBUG, _)
# noinspection PyPep8,PyComparisonWithNone
update_next_uom_level = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(and_(
or_(
and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num.is_(None),
has_base_uom.c.Prod_Num.is_(None)),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num == has_base_uom.c.Prod_Num),
or_(
and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM.is_(None),
has_base_uom.c.UOM.is_(None)),
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM == has_base_uom.c.UOM),
and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))) \
.values(Base_UOM_Factor=has_base_uom.c.Base_UOM_Factor,
Base_UOM_Qty=(has_base_uom.c.Base_UOM_Qty *
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty))
server_utils.mysql_engine.execute(update_next_uom_level)
self.session.commit()
@debug(lvl=logging.DEBUG, prefix='')
def recreate(self):
        if hasattr(self, 'session'):
self.write_module_file(creation=True)
get_creation_module()
self.create_schemas()
self.drop_and_create_all_tables()
self.write_mapping()
@debug(lvl=logging.DEBUG, prefix='')
def write_mapping(self):
        if hasattr(self, 'session'):
self.write_module_file(mapping=True)
self.scheduled_script()
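# A minimal usage sketch (hypothetical workbook filename), left as a comment:
#
#     importer = ArwPrfImporter("arw_prf_layout.xlsx")
#     importer.investigate_arw_prf_xl()   # parse the layout workbook
#     importer.recreate()                 # write modules, build schemas/tables, load data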
arw_col_list = {
"1": "Name",
"2": "Logical Field",
"3": "Tag",
"4": "Length",
"5": "Nested",
"6": "Description",
"7": "|",
"8": "Column Name",
"9": "Datatype",
"10": "Fill",
"11": "PK",
"12": "Nullable",
"13": "UQ",
"14": "IND",
"15": "B",
"16": "AI",
"17": "G",
"18": "SK",
"19": "Mapping",
"20": "Static Name",
"21": "Default/ Expression",
"22": "Notes"
}
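# ws_format_check validates row 3 of the first 22 worksheet columns against this
# mapping; columns beyond 22 carry the per-table import definitions collected
# into tbl_init_dict.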
class PrimaryReportFile(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
prf_dict = {}
@debug(lvl=logging.DEBUG, prefix='Primary Report File Initiated')
def __init__(self,
session,
filename):
self.session = session
self.filename = filename.lower()
self.sheetname = filename
self.tbl_init_dict = {}
self.current_tbl_dict = {}
self.archive_tbl_dict = {}
self.static_tbl_dict = {}
PrimaryReportFile.prf_dict[self.sheetname] = self
# @classmethod
# def clear_dict(cls):
# cls.prf_dict = {}
@debug(lvl=logging.DEBUG, prefix='')
def exists(self):
try:
server_utils.mysql_engine.execute("SHOW CREATE SCHEMA `{0}`;".format(self.filename)).scalar()
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Exists: {0}".format(self.filename))
return True
except exc.DBAPIError:
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Does Not Exist: {0}".format(self.filename))
return False
@debug(lvl=logging.DEBUG, prefix='')
def create(self):
PrimaryReportFile.logger.log(logging.NOTSET, "Creating Schema: {0}".format(self.filename))
server_utils.mysql_engine.execute(CreateSchema(self.filename))
@debug(lvl=logging.DEBUG, prefix='')
def create_if_not_exists(self):
if not self.exists():
self.create()
@debug(lvl=logging.DEBUG, prefix='')
def drop_and_create_tables(self):
tbl_lst = \
list(self.current_tbl_dict.values()) + \
list(self.archive_tbl_dict.values()) + \
list(self.static_tbl_dict.values())
for tbl_obj in tbl_lst:
tbl_obj.drop_and_create_if_not_exists()
# ARW_PRF_Mapping.InformReflection.prepare(server_utils.mysql_engine)
@debug(lvl=logging.DEBUG, prefix='')
def update_schema(self):
for current_tbl_obj in self.current_tbl_dict.values():
self.session.commit()
current_tbl_obj.truncate()
current_tbl_obj.append()
for archive_tbl_obj in self.archive_tbl_dict.values():
create_date = datetime.datetime.strptime(creation_date(archive_tbl_obj.filepath), "%Y-%m-%d %H:%M:%S")
max_date_time = archive_tbl_obj.max_date_time()
            if create_date != max_date_time:
archive_tbl_obj.append()
archive_tbl_obj.delete_sub_max_date_time()
# for static_tbl_obj in self.static_tbl_dict.values():
# pass
# append static
class Field(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
type_list = (
"BigInteger",
"Boolean",
"Date",
"DateTime",
"Enum",
"Float",
"Integer",
"Interval",
"LargeBinary",
"MatchType",
"Numeric",
"PickleType",
"SchemaType",
"SmallInteger",
"String",
"Text",
"Time",
"Unicode",
"UnicodeText",
"LONGTEXT"
)
@debug(lvl=logging.DEBUG, prefix='')
def __init__(self,
arw_name="",
logical_field="",
tag="",
length="",
nested="",
desc="",
column_name="",
data_type="N/A",
primary_key="",
nullable="",
unique="",
index="",
binary_col="",
fill="",
auto_incremental="",
dflt_exp="", # Don't need it
generated="", # Don't need it
static_key="", # Don't need it
default="", # Don't need it
notes=""):
self.arw_name = arw_name # ARW Name with spaces and such (Column A)
        self.logical_field = logical_field # If this is true, don't look for this value in the .csv file (Column B)
self.tag = tag # ARW Tag (Column C)
self.length = length # ARW Length (Not the length associated with datatype) (Column D)
self.nested = nested # ARW value (Column E)
self.desc = desc # ARW Description of field (Column F)
# None of the above fields influence the field's status in the DB
self.column_name = column_name # My assigned name without spaces (check that this is true in setter)(Column H)
self.data_type = data_type # SQL Datatype (convert to SQL Alchemy Datatype) (Column I)
self.primary_key = primary_key # Is this a primary key? (Column K)
self.nullable = nullable # Is this a NotNull field? (Column L)
self.unique = unique # Is this a Unique Index? (Column M)
self.index = index # Is this an Index? (Column N)
self.binary_col = binary_col # Is this a Binary Column? (Column O)
self.fill = fill # Datatype length (Column J)
self.auto_incremental = auto_incremental # Is this field Auto-Incremental? (Column R)
self.generated = generated # Is this field generated? (Column S)
self.static_key = static_key # Is this field a static key? (Column T)
self.default = default # Don't really know
self.dflt_exp = dflt_exp # What is the default expression for this field? (Only used if generated) (Column W)
self.notes = notes # Don't really know (Column X)
self.get_create_field()
# region arw_name ##########################################################################################s######
@property
@debug(lvl=logging.NOTSET)
def arw_name(self):
return self._arw_name
@arw_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def arw_name(self, value):
try:
str_val = str(value)
self._arw_name = str_val.strip()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("arw_name", value))
# endregion ########################################################################################################
# region logical_field ############################################################################################
@property
@debug(lvl=logging.NOTSET)
def logical_field(self):
return self._logical_field
@logical_field.setter
@debug(lvl=logging.NOTSET, prefix="")
def logical_field(self, value):
try:
str_val = str(value).upper().strip()
if str_val in ("Y", "N"):
self._logical_field = str_val.strip()
else:
raise ValueError("{0}.{1}: Value must be 'Y' or 'N': {2}".
format(self.arw_name, "logical_field", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "logical_field", value))
# endregion ########################################################################################################
# region tag ######################################################################################################
@property
@debug(lvl=logging.NOTSET)
def tag(self):
return self._tag
@tag.setter
@debug(lvl=logging.NOTSET, prefix="")
def tag(self, value):
try:
str_val = str(value)
self._tag = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "tag", value))
# endregion ########################################################################################################
# region length ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def length(self):
return self._length
@length.setter
@debug(lvl=logging.NOTSET, prefix="")
def length(self, value):
try:
int_val = int(value)
self._length = int_val
except ValueError:
try:
str_val = str(value)
if str_val.upper().strip() == "N/A":
self._length = None
else:
raise ValueError("{0}.{1}: Value is not 'N/A': {2}".format(self.arw_name, "length", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "length", value))
# endregion ########################################################################################################
# region nested ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nested(self):
return self._nested
@nested.setter
@debug(lvl=logging.NOTSET, prefix="")
def nested(self, value):
try:
str_val = str(value)
self._nested = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".format(self.arw_name, "nested", value))
# endregion ########################################################################################################
# region desc #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def desc(self):
return self._desc
@desc.setter
@debug(lvl=logging.NOTSET, prefix="")
def desc(self, value):
try:
str_val = str(value).replace("'", '"').strip()
            str_val = ''.join(str_val.splitlines())
            str_val = str_val.strip()
self._desc = str_val
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "desc", value))
# endregion ########################################################################################################
# region column_name ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def column_name(self):
return self._column_name
@column_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def column_name(self, value):
try:
str_val = str(value).strip()
if len(str_val) > 64:
raise Exception("{0}.{1}: String length greater than the 64 character limit: {2}"
.format(self.arw_name, "column_name", value))
scrubbed_val = str_val.replace("(", "").replace(")", "").replace("/", "").replace("-", "").replace("#", "")
if str_val == scrubbed_val:
try:
int(scrubbed_val[:1])
except ValueError:
self._column_name = scrubbed_val
else:
raise Exception("{0}.{1}: First character of value cannot be a number: {2}"
.format(self.arw_name, "column_name", value))
else:
raise Exception("{0}.{1}: Value has one of the following illegal characters: {{(, ), /, -, #}}: {2}"
.format(self.arw_name, "column_name", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "column_name", value))
# endregion ########################################################################################################
# region data_type ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def data_type(self):
return self._data_type
@data_type.setter
@debug(lvl=logging.NOTSET, prefix="")
def data_type(self, value):
try:
str_val = str(value)
if str_val.strip() in Field.type_list:
self._data_type = str_val.strip()
else:
raise ValueError("{0}.{1}: Value not in datatype list: {2}"
.format(self.arw_name, "data_type", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "data_type", value))
# endregion ########################################################################################################
# region fill #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def fill(self):
return self._fill
@fill.setter
@debug(lvl=logging.NOTSET, prefix="")
def fill(self, value):
if self.data_type in (
"BigInteger",
"Boolean",
"Date",
"DateTime",
"Integer",
"SmallInteger",
"Time",
"Text",
"LONGTEXT"
):
if value not in ("", None):
raise ValueError("{0}.{1}: Datatype does not allow for a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
self._fill = None
        elif self.data_type in (
                "LargeBinary",
                "String",
                # "Text",
                "Unicode",
                "UnicodeText"
        ):
            # "Float" is intentionally excluded here; the dedicated branch below
            # gives it a precision= fill instead of a length= fill.
if value in ("", None):
raise ValueError("{0}.{1}: Datatype requires a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
try:
int_val = int(value)
if self.data_type == "String" and self.binary_col:
self._fill = "length={0}, collation='binary'".format(str(int_val))
else:
self._fill = "length={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Float":
try:
int_val = int(value)
self._fill = "precision={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Numeric":
try:
str_val = str(value).strip()
pre_str, scale_str = str_val.split(",")
try:
pre_int = int(pre_str.strip())
scale_int = int(scale_str.strip())
self._fill = "precision={0}, scale={1}".format(str(pre_int), str(scale_int))
except ValueError:
raise ValueError("{0}.{1}: Error with precision or scale integer conversion: "
"precision={2}, scale={3}".
format(self.arw_name, "fill", pre_str, scale_str))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "fill", value))
elif self.data_type in (
"Enum",
"Interval",
"MatchType",
"PickleType",
"SchemaType"
):
raise ValueError("{0}.{1}: What the fuck are you doing using this datatype?: {2}"
.format(self.arw_name, "fill", self.data_type))
# endregion ########################################################################################################
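    # Illustrative fill inputs (hypothetical): data_type "String" with fill "15"
    # yields "length=15"; "Numeric" with fill "10,2" yields "precision=10, scale=2";
    # date and integer types require an empty fill.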
# region primary_key ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def primary_key(self):
return self._primary_key
@primary_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def primary_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "primary_key", value))
if str_val.strip().upper() == "X":
self._primary_key = True
elif str_val.strip().upper() == "":
self._primary_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "primary_key", value))
# endregion ########################################################################################################
# region nullable #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nullable(self):
return self._nullable
@nullable.setter
@debug(lvl=logging.NOTSET, prefix="")
def nullable(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "nullable", value))
if str_val.strip().upper() == "X":
if not self.primary_key:
self._nullable = True
else:
raise ValueError("{0}.{1}: Primary key cannot be nullable: {2}".
format(self.arw_name, "nullable", value))
elif str_val.strip().upper() == "":
self._nullable = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "nullable", value))
# endregion ########################################################################################################
# region unique ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def unique(self):
return self._unique
@unique.setter
@debug(lvl=logging.NOTSET, prefix="")
def unique(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "unique", value))
if str_val.strip().upper() == "X":
self._unique = True
elif str_val.strip().upper() == "":
self._unique = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "unique", value))
# endregion ########################################################################################################
# region index ####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def index(self):
return self._index
@index.setter
@debug(lvl=logging.NOTSET, prefix="")
def index(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "index", value))
if str_val.strip().upper() == "X":
self._index = True
elif str_val.strip().upper() == "":
self._index = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "index", value))
# endregion ########################################################################################################
# region binary_col ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def binary_col(self):
return self._binary_col
@binary_col.setter
@debug(lvl=logging.NOTSET, prefix="")
def binary_col(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "binary_col", value))
if str_val.strip().upper() == "X":
if self.data_type in ("String", "Text"):
self._binary_col = True
else:
raise ValueError("{0}.{1}: Only string and text datatypes can be binary: {2}".
format(self.arw_name, "binary_col", self.data_type))
elif str_val.strip().upper() == "":
self._binary_col = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "binary_col", value))
# endregion ########################################################################################################
# region auto_incremental #########################################################################################
@property
@debug(lvl=logging.NOTSET)
def auto_incremental(self):
return self._auto_incremental
@auto_incremental.setter
@debug(lvl=logging.NOTSET, prefix="")
def auto_incremental(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "auto_incremental", value))
if str_val.strip().upper() == "X":
if self.index and self.data_type in (
"BigInteger",
"Boolean",
"Float",
"Integer",
"Numeric",
"SmallInteger"):
self._auto_incremental = True
else:
raise ValueError("{0}.{1}: Autoincremented columns must be indexed and numeric.".
format(self.arw_name, "auto_incremental"))
elif str_val.strip().upper() == "":
self._auto_incremental = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "auto_incremental", value))
# endregion ########################################################################################################
# region generated ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def generated(self):
return self._generated
@generated.setter
@debug(lvl=logging.NOTSET, prefix="")
def generated(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "generated", value))
if str_val.strip().upper() == "X":
if not self.auto_incremental:
self._generated = True
else:
raise ValueError("{0}.{1}: Value cannot be generated and autoincremented: {2}".
format(self.arw_name, "generated", value))
elif str_val.strip().upper() == "":
self._generated = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "generated", value))
# endregion ########################################################################################################
# region static_key ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def static_key(self):
return self._static_key
@static_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def static_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "static_key", value))
if str_val.strip().upper() == "X":
self._static_key = True
elif str_val.strip().upper() == "":
self._static_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "static_key", value))
# endregion ########################################################################################################
# region default ##################################################################################################
@property
@debug(lvl=logging.NOTSET)
def default(self):
return self._default
@default.setter
@debug(lvl=logging.NOTSET, prefix="")
def default(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "default", value))
if str_val.strip().upper() == "X":
self._default = True
        elif str_val.strip().upper() == "":
            self._default = False
        else:
            raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
                             format(self.arw_name, "default", value))
    # endregion ########################################################################################################

import rejig.pybytecode
from rejig.syntaxtree import *
def check(what_is, what_should_be):
    # Compile the snippet into a throwaway function, lift its bytecode into a
    # rejig AST, and assert that it matches the expected tree.
    env = {}
    # Snippets containing statements (multiple lines, assignments, defs, or
    # print calls) are executed as a function body; bare expressions are
    # wrapped in a return.
    if "\n" in what_is or " = " in what_is or "def " in what_is or "print(" in what_is:
        exec("def f():\n    " + "\n    ".join(what_is.split("\n")), env)
    else:
        exec("def f():\n    return " + what_is, env)
    ast = rejig.pybytecode.ast(env["f"])
    print(str(ast))
    assert ast == what_should_be, "\nshould be: " + repr(what_should_be) + "\nyet it is: " + repr(ast)
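# In the expected trees below (as inferred from the checks themselves):
# Suite wraps a tuple of statements, Call('return', x) models a return,
# Const wraps a literal, and Assign((Name('x'),), v) models an assignment.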
check('"hello"', Suite((Call('return', Const('hello')),)))
check('''.3''', Suite((Call('return', Const(.3)),)))
check('''-3''', Suite((Call('return', Const(-3)),)))
check('''--3''', Suite((Call('return', Const(--3)),)))
check('''+3''', Suite((Call('return', Const(+3)),)))
check('''++3''', Suite((Call('return', Const(++3)),)))
check('''+-3''', Suite((Call('return', Const(+-3)),)))
check('''3e1''', Suite((Call('return', Const(3e1)),)))
check('''-3e1''', Suite((Call('return', Const(-3e1)),)))
check('''+3e1''', Suite((Call('return', Const(+3e1)),)))
check('0x123', Suite((Call('return', Const(0x123)),)))
check('0o123', Suite((Call('return', Const(0o123)),)))
check('3+4j', Suite((Call('return', Const(3+4j)),)))
check('''[]''', Suite((Call('return', Call('list')),)))
check('''[3]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3,]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3, 4]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4,]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4, 5]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5, 6]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[3, 4, 5, 6,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[[1], 2, 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5))),)))
check('''[[1, 2], 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5))),)))
check('''[[1, 2, 3], 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5))),)))
check('''[[1, 2, 3, 4], 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5))),)))
check('''[[1, 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1], 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2], 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2, 3], 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5)))),)))
check('''[[[1, 2, 3, 4], 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5)))),)))
check('''[[[1, 2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5))))),)))
check('''[1, 2, 3, 4, [5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5)))),)))
check('''[1, 2, 3, [4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5)))),)))
check('''[1, 2, [3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5)))),)))
check('''[1, [2, 3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[1, 2, 3, 4, [5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5))))),)))
check('''[[1, 2, 3, [4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5))))),)))
check('''[[1, 2, [3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5))))),)))
check('''[[1, [2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5))))),)))
check('''x = (None)''', Suite((Assign((Name('x'),), Const(None)), Call('return', Const(None)),)))
check('''x = (3, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, 6, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(6), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, None), 2, 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, None), 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, None), 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, None), 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, None), 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, None), 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, None), 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, None), 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, 4, (5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, (4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, (3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, (2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, (5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, (4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, (3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, (2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),))) # hey look: Python does dead code removal!
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
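# (Each variant above compiles to a bare ``return None``: CPython's compiler
# discards a constant expression used as a statement, so the literal 3 never
# reaches the bytecode that rejig decompiles.)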
check('''a''', Suite((Call('return', Name('a')),)))
check('''a.b''', Suite((Call('return', Call('.', Name('a'), 'b')),)))
check('''a.b.c''', Suite((Call('return', Call('.', Call('.', Name('a'), 'b'), 'c')),)))
check('''a.b.c.d''', Suite((Call('return', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd')),)))
check('''a.b.c.d.e''', Suite((Call('return', Call('.', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd'), 'e')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a[1][2]''', Suite((Call('return', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2))),)))
check('''a[1][2][3]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3))),)))
check('''a[1][2][3][4]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3)), Const(4))),)))
check('''(9, None).stuff''', Suite((Call('return', Call('.', Call('tuple', Const(9), Const(None)), 'stuff')),)))
check('''((9, None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), 'stuff')),)))
check('''(((9, None), None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), Const(None)), 'stuff')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
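# (Note how every omitted slice component decompiles to Const(None): per the
# cases above, ``a[1:]`` and ``a[1::]`` both yield
# Call('slice', Const(1), Const(None), Const(None)).)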
check('''a[1,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
check('''a[1,5]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1:[2]:3,[],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Const(2)), Const(3)), Call('list'), Const(5), Const(6))),)))
check('''a[1:[[2]]:3,[[]],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Call('list', Const(2))), Const(3)), Call('list', Call('list')), Const(5), Const(6))),)))
check('''a[2].three''', Suite((Call('return', Call('.', Call('[.]', Name('a'), Const(2)), 'three')),)))
check('''a.three''', Suite((Call('return', Call('.', Name('a'), 'three')),)))
check('''a[2]''', Suite((Call('return', Call('[.]', Name('a'), Const(2))),)))
check('''a.three[2]''', Suite((Call('return', Call('[.]', Call('.', Name('a'), 'three'), Const(2))),)))
check('''x and y''', Suite((Call('return', Call('and', Name('x'), Name('y'))),)))
check('''x and y and z''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x and y and z and w''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Call('and', Name('z'), Name('w'))))),)))
check('''not x''', Suite((Call('return', Call('not', Name('x'))),)))
check('''not x and y''', Suite((Call('return', Call('and', Call('not', Name('x')), Name('y'))),)))
check('''x or y''', Suite((Call('return', Call('or', Name('x'), Name('y'))),)))
check('''x or y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x or y or z''', Suite((Call('return', Call('or', Name('x'), Call('or', Name('y'), Name('z')))),)))
check('''not x or y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Name('z')))),)))
check('''x or not y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''x or y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''not x or not y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''not x or y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''x or not y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
#
# PySNMP MIB module TPT-POLICY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TPT-POLICY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:26:23 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
Ipv6Address, = mibBuilder.importSymbols("IPV6-TC", "Ipv6Address")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, Counter64, Integer32, iso, Counter32, NotificationType, TimeTicks, Unsigned32, ModuleIdentity, Gauge32, IpAddress, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter64", "Integer32", "iso", "Counter32", "NotificationType", "TimeTicks", "Unsigned32", "ModuleIdentity", "Gauge32", "IpAddress", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
tpt_tpa_objs, tpt_tpa_unkparams, tpt_tpa_eventsV2 = mibBuilder.importSymbols("TPT-TPAMIBS-MIB", "tpt-tpa-objs", "tpt-tpa-unkparams", "tpt-tpa-eventsV2")
tpt_policy = ModuleIdentity((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1)).setLabel("tpt-policy")
tpt_policy.setRevisions(('2016-05-25 18:54', '2015-06-19 18:30', '2015-05-28 13:30', '2014-12-15 11:42',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: tpt_policy.setRevisionsDescriptions(('Updated copyright information. Minor MIB syntax fixes.', 'Added SSL inspection notification.', 'Added SSL inspected flag parameter to policy notifications.', 'Updated table sequence entries to be SMI compliant.',))
if mibBuilder.loadTexts: tpt_policy.setLastUpdated('201605251854Z')
if mibBuilder.loadTexts: tpt_policy.setOrganization('Trend Micro, Inc.')
if mibBuilder.loadTexts: tpt_policy.setContactInfo('www.trendmicro.com')
if mibBuilder.loadTexts: tpt_policy.setDescription("TPA policy counters. Copyright (C) 2016 Trend Micro Incorporated. All Rights Reserved. Trend Micro makes no warranty of any kind with regard to this material, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose. Trend Micro shall not be liable for errors contained herein or for incidental or consequential damages in connection with the furnishing, performance, or use of this material. This document contains proprietary information, which is protected by copyright. No part of this document may be photocopied, reproduced, or translated into another language without the prior written consent of Trend Micro. The information is provided 'as is' without warranty of any kind and is subject to change without notice. The only warranties for Trend Micro products and services are set forth in the express warranty statements accompanying such products and services. Nothing herein should be construed as constituting an additional warranty. Trend Micro shall not be liable for technical or editorial errors or omissions contained herein. TippingPoint(R), the TippingPoint logo, and Digital Vaccine(R) are registered trademarks of Trend Micro. All other company and product names may be trademarks of their respective holders. All rights reserved. This document contains confidential information, trade secrets or both, which are the property of Trend Micro. No part of this documentation may be reproduced in any form or by any means or used to make any derivative work (such as translation, transformation, or adaptation) without written permission from Trend Micro or one of its subsidiaries. All other company and product names may be trademarks of their respective holders. ")
policyPacketsDropped = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: policyPacketsDropped.setDescription('The total number of packets discarded due to network congestion.')
policyPacketsBlocked = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsBlocked.setStatus('current')
if mibBuilder.loadTexts: policyPacketsBlocked.setDescription('The cumulative number of packets blocked because of policy actions.')
policyPacketsIncoming = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsIncoming.setStatus('current')
if mibBuilder.loadTexts: policyPacketsIncoming.setDescription('The total number of incoming packets.')
policyPacketsOutgoing = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsOutgoing.setStatus('current')
if mibBuilder.loadTexts: policyPacketsOutgoing.setDescription('The total number of outgoing packets.')
policyPacketsInvalid = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsInvalid.setStatus('current')
if mibBuilder.loadTexts: policyPacketsInvalid.setDescription('The total number of packets discarded because they were invalid.')
policyPacketsPermitted = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsPermitted.setStatus('current')
if mibBuilder.loadTexts: policyPacketsPermitted.setDescription('The cumulative number of packets permitted because of policy actions.')
policyPacketsDropped64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsDropped64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsDropped64.setDescription('The total number of packets discarded due to network congestion.')
policyPacketsBlocked64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsBlocked64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsBlocked64.setDescription('The cumulative number of packets blocked because of policy actions.')
policyPacketsIncoming64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsIncoming64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsIncoming64.setDescription('The total number of incoming packets.')
policyPacketsOutgoing64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsOutgoing64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsOutgoing64.setDescription('The total number of outgoing packets.')
policyPacketsInvalid64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsInvalid64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsInvalid64.setDescription('The total number of packets discarded because they were invalid.')
policyPacketsPermitted64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsPermitted64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsPermitted64.setDescription('The total number of packets permitted because of policy actions.')
policyPacketsRateLimited64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsRateLimited64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsRateLimited64.setDescription('The total number of packets discarded by rate limiting filters.')
policyPacketsTrusted64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsTrusted64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsTrusted64.setDescription('The cumulative number of packets trusted because of policy actions.')
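# A minimal usage sketch (assumptions: pysnmp's hlapi is available, and the
# agent address 'device.example.com' and community 'public' are placeholders).
# This shows how one of the scalars above could be fetched; it is not part of
# the generated MIB module, so it is left commented out:
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, getCmd)
#   errorIndication, errorStatus, errorIndex, varBinds = next(getCmd(
#       SnmpEngine(), CommunityData('public'),
#       UdpTransportTarget(('device.example.com', 161)), ContextData(),
#       ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'policyPacketsDropped64', 0))))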
policyDVObjs = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 10))
if mibBuilder.loadTexts: policyDVObjs.setStatus('current')
if mibBuilder.loadTexts: policyDVObjs.setDescription('Sub-tree of Digital Vaccine information.')
policyDVVersion = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 10, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyDVVersion.setStatus('current')
if mibBuilder.loadTexts: policyDVVersion.setDescription('The version number of the Digital Vaccine on this machine.')
policyCounterTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5), )
if mibBuilder.loadTexts: policyCounterTable.setStatus('obsolete')
if mibBuilder.loadTexts: policyCounterTable.setDescription('Table of per-policy counter values.')
policyCounterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "policyGlobalID"))
if mibBuilder.loadTexts: policyCounterEntry.setStatus('obsolete')
if mibBuilder.loadTexts: policyCounterEntry.setDescription('An entry in the policy counter table. Rows cannot be created or deleted. ')
policyGlobalID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40)))
if mibBuilder.loadTexts: policyGlobalID.setStatus('obsolete')
if mibBuilder.loadTexts: policyGlobalID.setDescription('The global identifier of a policy.')
policyDescriptiveName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80)))
if mibBuilder.loadTexts: policyDescriptiveName.setStatus('obsolete')
if mibBuilder.loadTexts: policyDescriptiveName.setDescription('The human-readable name of a policy.')
policyCountBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 3), Counter64())
if mibBuilder.loadTexts: policyCountBytes.setStatus('obsolete')
if mibBuilder.loadTexts: policyCountBytes.setDescription('The total number of bytes affected by the given policy.')
policyCountPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 4), Counter64())
if mibBuilder.loadTexts: policyCountPackets.setStatus('obsolete')
if mibBuilder.loadTexts: policyCountPackets.setDescription('The total number of packets affected by the given policy.')
policyCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 5), Unsigned32())
if mibBuilder.loadTexts: policyCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: policyCreationTime.setDescription('The time the policy was pushed to NetPAL, in seconds since the epoch.')
class PolicyProtocol(TextualConvention, Integer32):
    description = 'A selection from a set of networking protocols detected by a policy.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
    namedValues = NamedValues(("icmp", 1), ("udp", 2), ("tcp", 3), ("other-ip", 4), ("arp", 5), ("other-eth", 6), ("icmpv6", 7), ("other-ipv6", 8))

class PolicyFrameSize(TextualConvention, Integer32):
    description = 'A selection from a set of layer-2 frame size categories.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
    namedValues = NamedValues(("fs64B", 1), ("fs65to127B", 2), ("fs128to255B", 3), ("fs256to511B", 4), ("fs512to1023B", 5), ("fs1024toMaxB", 6), ("fsMaxto4095B", 7), ("fs4096to9216B", 8), ("fsUnder", 9), ("fsOver", 10), ("fs9217to16383", 11))

class PolicyFrameType(TextualConvention, Integer32):
    description = 'A selection from a set of layer-2 frame types based on addressing and error status.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
    namedValues = NamedValues(("unicast", 1), ("broadcast", 2), ("multicast", 3), ("macControl", 4), ("fcsError", 5), ("alignError", 6), ("symbolError", 7))

class PolicySeverity(TextualConvention, Integer32):
    description = 'A selection from a set of severity levels used by policies. Used for both statistical reports and notifications.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("warning", 1), ("minor", 2), ("major", 3), ("critical", 4))
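# A hedged example of how these textual conventions behave (pyasn1 resolves
# namedValues when displaying a value): PolicySeverity(3).prettyPrint() is
# expected to return 'major', and PolicyProtocol(2).prettyPrint() 'udp'.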
topTenHitsByPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11), )
if mibBuilder.loadTexts: topTenHitsByPolicyTable.setStatus('current')
if mibBuilder.loadTexts: topTenHitsByPolicyTable.setDescription('Table of policies with the ten greatest hit counts.')
topTenHitsByPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "topTenRank"))
if mibBuilder.loadTexts: topTenHitsByPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: topTenHitsByPolicyEntry.setDescription('An entry in the top ten policies table. Rows cannot be created or deleted. ')
topTenRank = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: topTenRank.setStatus('current')
if mibBuilder.loadTexts: topTenRank.setDescription('The numerical ranking 1 through 10 of a policy.')
policyHitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyHitCount.setStatus('current')
if mibBuilder.loadTexts: policyHitCount.setDescription('The count of alerts generated by a policy.')
policyName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyName.setStatus('current')
if mibBuilder.loadTexts: policyName.setDescription('The human-readable name of a policy.')
policyUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyUUID.setStatus('current')
if mibBuilder.loadTexts: policyUUID.setDescription('The global identifier of a policy.')
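# A usage sketch for the table above (same placeholder agent/community
# assumptions as before; commented out because it is not part of the module):
#
#   from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                             ContextData, ObjectType, ObjectIdentity, nextCmd)
#   for errorIndication, errorStatus, errorIndex, varBinds in nextCmd(
#           SnmpEngine(), CommunityData('public'),
#           UdpTransportTarget(('device.example.com', 161)), ContextData(),
#           ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'topTenRank')),
#           ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'policyName')),
#           lexicographicMode=False):
#       print(varBinds)  # one (rank, name) pair per top-ten row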
alertsBySeverityTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12), )
if mibBuilder.loadTexts: alertsBySeverityTable.setStatus('current')
if mibBuilder.loadTexts: alertsBySeverityTable.setDescription('Table of alert counts of all policies at each severity level.')
alertsBySeverityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertSeverity"))
if mibBuilder.loadTexts: alertsBySeverityEntry.setStatus('current')
if mibBuilder.loadTexts: alertsBySeverityEntry.setDescription('An entry in the alerts by severity table. Rows cannot be created or deleted. ')
alertSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1, 1), PolicySeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alertSeverity.setStatus('current')
if mibBuilder.loadTexts: alertSeverity.setDescription('The severity of a policy.')
severityAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: severityAlertCount.setStatus('current')
if mibBuilder.loadTexts: severityAlertCount.setDescription('The count of alerts generated by all policies of a given severity.')
alertsByProtocolTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13), )
if mibBuilder.loadTexts: alertsByProtocolTable.setStatus('current')
if mibBuilder.loadTexts: alertsByProtocolTable.setDescription('Table of alert counts of all policies at each protocol.')
alertsByProtocolEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertProtocol"))
if mibBuilder.loadTexts: alertsByProtocolEntry.setStatus('current')
if mibBuilder.loadTexts: alertsByProtocolEntry.setDescription('An entry in the alerts by protocol table. Rows cannot be created or deleted. ')
alertProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1, 1), PolicyProtocol()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alertProtocol.setStatus('current')
if mibBuilder.loadTexts: alertProtocol.setDescription('The protocol of a policy.')
protocolAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: protocolAlertCount.setStatus('current')
if mibBuilder.loadTexts: protocolAlertCount.setDescription('The count of alerts generated by all policies of a given protocol.')
alertsByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14), )
if mibBuilder.loadTexts: alertsByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: alertsByZoneTable.setDescription('Table of alert counts of all policies for each zone.')
alertsByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertSlot"), (0, "TPT-POLICY-MIB", "alertPort"))
if mibBuilder.loadTexts: alertsByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: alertsByZoneEntry.setDescription('An entry in the alerts by zone table. Rows cannot be created or deleted. ')
alertSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 1), Unsigned32())
if mibBuilder.loadTexts: alertSlot.setStatus('obsolete')
if mibBuilder.loadTexts: alertSlot.setDescription('The slot portion identifying the zone affected by a policy.')
alertPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 2), Unsigned32())
if mibBuilder.loadTexts: alertPort.setStatus('obsolete')
if mibBuilder.loadTexts: alertPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneAlertCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneAlertCount.setDescription('The count of alerts generated by all policies of a given zone.')
permitsByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15), )
if mibBuilder.loadTexts: permitsByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: permitsByZoneTable.setDescription('Table of permit counts of all policies for each zone.')
permitsByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "permitSlot"), (0, "TPT-POLICY-MIB", "permitPort"))
if mibBuilder.loadTexts: permitsByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: permitsByZoneEntry.setDescription('An entry in the permits by zone table. Rows cannot be created or deleted. ')
permitSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 1), Unsigned32())
if mibBuilder.loadTexts: permitSlot.setStatus('obsolete')
if mibBuilder.loadTexts: permitSlot.setDescription('The slot portion identifying the zone affected by a policy.')
permitPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 2), Unsigned32())
if mibBuilder.loadTexts: permitPort.setStatus('obsolete')
if mibBuilder.loadTexts: permitPort.setDescription('The port portion identifying the zone affected by a policy.')
zonePermitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zonePermitCount.setStatus('obsolete')
if mibBuilder.loadTexts: zonePermitCount.setDescription('The count of permits generated by all policies of a given zone.')
blocksByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16), )
if mibBuilder.loadTexts: blocksByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: blocksByZoneTable.setDescription('Table of block counts of all policies for each zone.')
blocksByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "blockSlot"), (0, "TPT-POLICY-MIB", "blockPort"))
if mibBuilder.loadTexts: blocksByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: blocksByZoneEntry.setDescription('An entry in the blocks by zone table. Rows cannot be created or deleted. ')
blockSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 1), Unsigned32())
if mibBuilder.loadTexts: blockSlot.setStatus('obsolete')
if mibBuilder.loadTexts: blockSlot.setDescription('The slot portion identifying the zone affected by a policy.')
blockPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 2), Unsigned32())
if mibBuilder.loadTexts: blockPort.setStatus('obsolete')
if mibBuilder.loadTexts: blockPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneBlockCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneBlockCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneBlockCount.setDescription('The count of blocks generated by all policies of a given zone.')
p2psByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17), )
if mibBuilder.loadTexts: p2psByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: p2psByZoneTable.setDescription('Table of p2p counts of all policies for each zone.')
p2psByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "p2pSlot"), (0, "TPT-POLICY-MIB", "p2pPort"))
if mibBuilder.loadTexts: p2psByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: p2psByZoneEntry.setDescription('An entry in the p2ps by zone table. Rows cannot be created or deleted. ')
p2pSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 1), Unsigned32())
if mibBuilder.loadTexts: p2pSlot.setStatus('obsolete')
if mibBuilder.loadTexts: p2pSlot.setDescription('The slot portion identifying the zone affected by a policy.')
p2pPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 2), Unsigned32())
if mibBuilder.loadTexts: p2pPort.setStatus('obsolete')
if mibBuilder.loadTexts: p2pPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneP2pCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneP2pCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneP2pCount.setDescription('The count of p2ps generated by all policies of a given zone.')
framesBySizeTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18), )
if mibBuilder.loadTexts: framesBySizeTable.setStatus('current')
if mibBuilder.loadTexts: framesBySizeTable.setDescription('Table of frame counts received in each size category.')
framesBySizeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "frameSize"))
if mibBuilder.loadTexts: framesBySizeEntry.setStatus('current')
if mibBuilder.loadTexts: framesBySizeEntry.setDescription('An entry in the frames by size table. Rows cannot be created or deleted. ')
frameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1, 1), PolicyFrameSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frameSize.setStatus('current')
if mibBuilder.loadTexts: frameSize.setDescription('The size category of a frame.')
sizeFrameCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sizeFrameCount.setStatus('current')
if mibBuilder.loadTexts: sizeFrameCount.setDescription('The count of frames received in a given size category.')
framesByTypeTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19), )
if mibBuilder.loadTexts: framesByTypeTable.setStatus('current')
if mibBuilder.loadTexts: framesByTypeTable.setDescription('Table of frame counts received in each type classification.')
framesByTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "frameType"))
if mibBuilder.loadTexts: framesByTypeEntry.setStatus('current')
if mibBuilder.loadTexts: framesByTypeEntry.setDescription('An entry in the frames by type table. Rows cannot be created or deleted. ')
frameType = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1, 1), PolicyFrameType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frameType.setStatus('current')
if mibBuilder.loadTexts: frameType.setDescription('The type classification (e.g., unicast, broadcast, FCS error) of a frame.')
typeFrameCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: typeFrameCount.setStatus('current')
if mibBuilder.loadTexts: typeFrameCount.setDescription('The count of frames received in a given type classification.')
packetsByProtocolTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20), )
if mibBuilder.loadTexts: packetsByProtocolTable.setStatus('current')
if mibBuilder.loadTexts: packetsByProtocolTable.setDescription('Table of packet counts received for each protocol.')
packetsByProtocolEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "packetProtocol"))
if mibBuilder.loadTexts: packetsByProtocolEntry.setStatus('current')
if mibBuilder.loadTexts: packetsByProtocolEntry.setDescription('An entry in the packets by protocol table. Rows cannot be created or deleted. ')
packetProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1, 1), PolicyProtocol()).setMaxAccess("readonly")
if mibBuilder.loadTexts: packetProtocol.setStatus('current')
if mibBuilder.loadTexts: packetProtocol.setDescription('The protocol of a policy.')
protocolPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: protocolPacketCount.setStatus('current')
if mibBuilder.loadTexts: protocolPacketCount.setDescription('The count of packets received for a given protocol.')
portStatsTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23), )
if mibBuilder.loadTexts: portStatsTable.setStatus('current')
if mibBuilder.loadTexts: portStatsTable.setDescription('Table of statistics for each physical port.')
portStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "portNumber"))
if mibBuilder.loadTexts: portStatsEntry.setStatus('current')
if mibBuilder.loadTexts: portStatsEntry.setDescription('An entry in the port statistics table. Rows cannot be created or deleted. ')
portNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 1), Unsigned32())
if mibBuilder.loadTexts: portNumber.setStatus('current')
if mibBuilder.loadTexts: portNumber.setDescription('The numeric index of a port.')
portName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portName.setStatus('current')
if mibBuilder.loadTexts: portName.setDescription('The name of a port.')
portVlanTranslations = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVlanTranslations.setStatus('current')
if mibBuilder.loadTexts: portVlanTranslations.setDescription('Number of packets leaving this egress port whose VLAN IDs were translated.')
policyByNumberTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21), )
if mibBuilder.loadTexts: policyByNumberTable.setStatus('current')
if mibBuilder.loadTexts: policyByNumberTable.setDescription('Table of policy names and descriptions indexed by policy number.')
policyByNumberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "policyNumber"))
if mibBuilder.loadTexts: policyByNumberEntry.setStatus('current')
if mibBuilder.loadTexts: policyByNumberEntry.setDescription('An entry in the policy by number table. Rows cannot be created or deleted. ')
policyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 1), Unsigned32())
if mibBuilder.loadTexts: policyNumber.setStatus('current')
if mibBuilder.loadTexts: policyNumber.setDescription('The number of a policy.')
numberName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberName.setStatus('current')
if mibBuilder.loadTexts: numberName.setDescription('The name of a policy.')
numberDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 3000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberDesc.setStatus('current')
if mibBuilder.loadTexts: numberDesc.setDescription('The description of a policy.')
securityZonePairTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22), )
if mibBuilder.loadTexts: securityZonePairTable.setStatus('current')
if mibBuilder.loadTexts: securityZonePairTable.setDescription('Table of information and statistics for each security zone pair.')
securityZonePairEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "szpUUID"))
if mibBuilder.loadTexts: securityZonePairEntry.setStatus('current')
if mibBuilder.loadTexts: securityZonePairEntry.setDescription('An entry in the security zone pair table. Rows cannot be created or deleted. ')
szpName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpName.setStatus('current')
if mibBuilder.loadTexts: szpName.setDescription('The name of a security zone pair.')
szpInZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpInZoneName.setStatus('current')
if mibBuilder.loadTexts: szpInZoneName.setDescription('The name of the input security zone of a security zone pair.')
szpOutZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
<|begin_of_text|>"""
Module for the DomainMatrix class.

A DomainMatrix represents a matrix with elements that are in a particular
Domain. Each DomainMatrix internally wraps a DDM which is used for the
lower-level operations. The idea is that the DomainMatrix class provides the
convenience routines for converting between Expr and the poly domains as well
as unifying matrices with different domains.

"""
from collections import Counter
from functools import reduce
from typing import Union as tUnion, Tuple as tTuple
from sympy.utilities.decorator import doctest_depends_on
from sympy.core.sympify import _sympify
from ..domains import Domain
from ..constructor import construct_domain
from .exceptions import (
DMFormatError,
DMBadInputError,
DMShapeError,
DMDomainError,
DMNotAField,
DMNonSquareMatrixError,
DMNonInvertibleMatrixError
)
from .domainscalar import DomainScalar
from sympy.polys.domains import ZZ, EXRAW, QQ
from sympy.polys.densearith import dup_mul
from sympy.polys.densebasic import dup_convert
from sympy.polys.densetools import (
dup_mul_ground,
dup_quo_ground,
dup_content,
dup_clear_denoms,
dup_primitive,
dup_transform,
)
from sympy.polys.factortools import dup_factor_list
from sympy.polys.polyutils import _sort_factors
from .ddm import DDM
from .sdm import SDM
from .dfm import DFM
from .rref import _dm_rref, _dm_rref_den
def DM(rows, domain):
"""Convenient alias for DomainMatrix.from_list
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> DM([[1, 2], [3, 4]], ZZ)
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
See Also
========
DomainMatrix.from_list
"""
return DomainMatrix.from_list(rows, domain)
class DomainMatrix:
r"""
Associate Matrix with :py:class:`~.Domain`
Explanation
===========
DomainMatrix uses :py:class:`~.Domain` for its internal representation
which makes it faster than the SymPy Matrix class (currently) for many
common operations, but this advantage makes it not entirely compatible
with Matrix. DomainMatrix are analogous to numpy arrays with "dtype".
In the DomainMatrix, each element has a domain such as :ref:`ZZ`
or :ref:`QQ(a)`.
Examples
========
Creating a DomainMatrix from the existing Matrix class:
>>> from sympy import Matrix
>>> from sympy.polys.matrices import DomainMatrix
>>> Matrix1 = Matrix([
... [1, 2],
... [3, 4]])
>>> A = DomainMatrix.from_Matrix(Matrix1)
>>> A
DomainMatrix({0: {0: 1, 1: 2}, 1: {0: 3, 1: 4}}, (2, 2), ZZ)
Directly forming a DomainMatrix:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
See Also
========
DDM
SDM
Domain
Poly
"""
rep: tUnion[SDM, DDM, DFM]
shape: tTuple[int, int]
domain: Domain
def __new__(cls, rows, shape, domain, *, fmt=None):
"""
Creates a :py:class:`~.DomainMatrix`.
Parameters
==========
rows : Represents elements of DomainMatrix as list of lists
shape : Represents dimension of DomainMatrix
domain : Represents :py:class:`~.Domain` of DomainMatrix
Raises
======
TypeError
If any of rows, shape and domain are not provided
"""
if isinstance(rows, (DDM, SDM, DFM)):
raise TypeError("Use from_rep to initialise from SDM/DDM")
elif isinstance(rows, list):
rep = DDM(rows, shape, domain)
elif isinstance(rows, dict):
rep = SDM(rows, shape, domain)
else:
msg = "Input should be list-of-lists or dict-of-dicts"
raise TypeError(msg)
if fmt is not None:
            if fmt == 'sparse':
rep = rep.to_sdm()
elif fmt == 'dense':
rep = rep.to_ddm()
else:
raise ValueError("fmt should be'sparse' or 'dense'")
# Use python-flint for dense matrices if possible
if rep.fmt == 'dense' and DFM._supports_domain(domain):
rep = rep.to_dfm()
return cls.from_rep(rep)
def __reduce__(self):
rep = self.rep
if rep.fmt == 'dense':
arg = self.to_list()
        elif rep.fmt == 'sparse':
arg = dict(rep)
else:
raise RuntimeError # pragma: no cover
args = (arg, rep.shape, rep.domain)
return (self.__class__, args)
def __getitem__(self, key):
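        """Get a single element or a slice of the matrix.
        A short sketch (``DM`` and the reprs follow the conventions used
        elsewhere in this module):
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DM
        >>> A = DM([[1, 2], [3, 4]], ZZ)
        >>> A[0, 1].element
        2
        >>> A[0, :]
        DomainMatrix([[1, 2]], (1, 2), ZZ)
        """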
i, j = key
m, n = self.shape
if not (isinstance(i, slice) or isinstance(j, slice)):
return DomainScalar(self.rep.getitem(i, j), self.domain)
if not isinstance(i, slice):
if not -m <= i < m:
raise IndexError("Row index out of range")
i = i % m
i = slice(i, i+1)
if not isinstance(j, slice):
if not -n <= j < n:
raise IndexError("Column index out of range")
j = j % n
j = slice(j, j+1)
return self.from_rep(self.rep.extract_slice(i, j))
def getitem_sympy(self, i, j):
return self.domain.to_sympy(self.rep.getitem(i, j))
def extract(self, rowslist, colslist):
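        """Extract the submatrix at the given row and column indices.
        A small sketch of typical usage:
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DM
        >>> A = DM([[1, 2], [3, 4]], ZZ)
        >>> A.extract([0], [0, 1])
        DomainMatrix([[1, 2]], (1, 2), ZZ)
        """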
return self.from_rep(self.rep.extract(rowslist, colslist))
def __setitem__(self, key, value):
i, j = key
if not self.domain.of_type(value):
raise TypeError
if isinstance(i, int) and isinstance(j, int):
self.rep.setitem(i, j, value)
else:
raise NotImplementedError
@classmethod
def from_rep(cls, rep):
"""Create a new DomainMatrix efficiently from DDM/SDM.
Examples
========
        Create a :py:class:`~.DomainMatrix` with a dense internal
        representation as :py:class:`~.DDM`:
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.polys.matrices.ddm import DDM
>>> drep = DDM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> dM = DomainMatrix.from_rep(drep)
>>> dM
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
Create a :py:class:`~.DomainMatrix` with a sparse internal
representation as :py:class:`~.SDM`:
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.polys.matrices.sdm import SDM
>>> from sympy import ZZ
>>> drep = SDM({0:{1:ZZ(1)},1:{0:ZZ(2)}}, (2, 2), ZZ)
>>> dM = DomainMatrix.from_rep(drep)
>>> dM
DomainMatrix({0: {1: 1}, 1: {0: 2}}, (2, 2), ZZ)
Parameters
==========
rep: SDM or DDM
The internal sparse or dense representation of the matrix.
Returns
=======
DomainMatrix
A :py:class:`~.DomainMatrix` wrapping *rep*.
Notes
=====
This takes ownership of rep as its internal representation. If rep is
being mutated elsewhere then a copy should be provided to
``from_rep``. Only minimal verification or checking is done on *rep*
as this is supposed to be an efficient internal routine.
"""
if not (isinstance(rep, (DDM, SDM)) or (DFM is not None and isinstance(rep, DFM))):
raise TypeError("rep should be of type DDM or SDM")
self = super().__new__(cls)
self.rep = rep
self.shape = rep.shape
self.domain = rep.domain
return self
@classmethod
def from_list(cls, rows, domain):
r"""
Convert a list of lists into a DomainMatrix
Parameters
==========
rows: list of lists
Each element of the inner lists should be either the single arg,
or tuple of args, that would be passed to the domain constructor
in order to form an element of the domain. See examples.
Returns
=======
DomainMatrix containing elements defined in rows
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import FF, QQ, ZZ
>>> A = DomainMatrix.from_list([[1, 0, 1], [0, 0, 1]], ZZ)
>>> A
DomainMatrix([[1, 0, 1], [0, 0, 1]], (2, 3), ZZ)
>>> B = DomainMatrix.from_list([[1, 0, 1], [0, 0, 1]], FF(7))
>>> B
DomainMatrix([[1 mod 7, 0 mod 7, 1 mod 7], [0 mod 7, 0 mod 7, 1 mod 7]], (2, 3), GF(7))
>>> C = DomainMatrix.from_list([[(1, 2), (3, 1)], [(1, 4), (5, 1)]], QQ)
>>> C
DomainMatrix([[1/2, 3], [1/4, 5]], (2, 2), QQ)
See Also
========
from_list_sympy
"""
nrows = len(rows)
ncols = 0 if not nrows else len(rows[0])
conv = lambda e: domain(*e) if isinstance(e, tuple) else domain(e)
domain_rows = [[conv(e) for e in row] for row in rows]
return DomainMatrix(domain_rows, (nrows, ncols), domain)
@classmethod
def from_list_sympy(cls, nrows, ncols, rows, **kwargs):
r"""
Convert a list of lists of Expr into a DomainMatrix using construct_domain
Parameters
==========
nrows: number of rows
ncols: number of columns
rows: list of lists
Returns
=======
DomainMatrix containing elements of rows
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.abc import x, y, z
>>> A = DomainMatrix.from_list_sympy(1, 3, [[x, y, z]])
>>> A
DomainMatrix([[x, y, z]], (1, 3), ZZ[x,y,z])
See Also
========
sympy.polys.constructor.construct_domain, from_dict_sympy
"""
assert len(rows) == nrows
assert all(len(row) == ncols for row in rows)
items_sympy = [_sympify(item) for row in rows for item in row]
domain, items_domain = cls.get_domain(items_sympy, **kwargs)
domain_rows = [[items_domain[ncols*r + c] for c in range(ncols)] for r in range(nrows)]
return DomainMatrix(domain_rows, (nrows, ncols), domain)
@classmethod
def from_dict_sympy(cls, nrows, ncols, elemsdict, **kwargs):
"""
Parameters
==========
nrows: number of rows
ncols: number of cols
elemsdict: dict of dicts containing non-zero elements of the DomainMatrix
Returns
=======
DomainMatrix containing elements of elemsdict
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.abc import x,y,z
>>> elemsdict = {0: {0:x}, 1:{1: y}, 2: {2: z}}
>>> A = DomainMatrix.from_dict_sympy(3, 3, elemsdict)
>>> A
DomainMatrix({0: {0: x}, 1: {1: y}, 2: {2: z}}, (3, 3), ZZ[x,y,z])
See Also
========
from_list_sympy
"""
if not all(0 <= r < nrows for r in elemsdict):
raise DMBadInputError("Row out of range")
if not all(0 <= c < ncols for row in elemsdict.values() for c in row):
raise DMBadInputError("Column out of range")
items_sympy = [_sympify(item) for row in elemsdict.values() for item in row.values()]
domain, items_domain = cls.get_domain(items_sympy, **kwargs)
idx = 0
items_dict = {}
for i, row in elemsdict.items():
items_dict[i] = {}
for j in row:
items_dict[i][j] = items_domain[idx]
idx += 1
return DomainMatrix(items_dict, (nrows, ncols), domain)
@classmethod
def from_Matrix(cls, M, fmt='sparse',**kwargs):
r"""
Convert Matrix to DomainMatrix
Parameters
==========
M: Matrix
Returns
=======
Returns DomainMatrix with identical elements as M
Examples
========
>>> from sympy import Matrix
>>> from sympy.polys.matrices import DomainMatrix
>>> M = Matrix([
... [1.0, 3.4],
... [2.4, 1]])
>>> A = DomainMatrix.from_Matrix(M)
>>> A
DomainMatrix({0: {0: 1.0, 1: 3.4}, 1: {0: 2.4, 1: 1.0}}, (2, 2), RR)
We can keep internal representation as ddm using fmt='dense'
>>> from sympy import Matrix, QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix.from_Matrix(Matrix([[QQ(1, 2), QQ(3, 4)], [QQ(0, 1), QQ(0, 1)]]), fmt='dense')
>>> A.rep
[[1/2, 3/4], [0, 0]]
See Also
========
Matrix
"""
if fmt == 'dense':
return cls.from_list_sympy(*M.shape, M.tolist(), **kwargs)
return cls.from_dict_sympy(*M.shape, M.todod(), **kwargs)
@classmethod
def get_domain(cls, items_sympy, **kwargs):
K, items_K = construct_domain(items_sympy, **kwargs)
return K, items_K
def choose_domain(self, **opts):
"""Convert to a domain found by :func:`~.construct_domain`.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[1, 2], [3, 4]], ZZ)
>>> M
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
>>> M.choose_domain(field=True)
DomainMatrix([[1, 2], [3, 4]], (2, 2), QQ)
>>> from sympy.abc import x
>>> M = DM([[1, x], [x**2, x**3]], ZZ[x])
>>> M.choose_domain(field=True).domain
ZZ(x)
Keyword arguments are passed to :func:`~.construct_domain`.
See Also
========
construct_domain
convert_to
"""
elements, data = self.to_sympy().to_flat_nz()
dom, elements_dom = construct_domain(elements, **opts)
return self.from_flat_nz(elements_dom, data, dom)
def copy(self):
return self.from_rep(self.rep.copy())
def convert_to(self, K):
r"""
Change the domain of DomainMatrix to desired domain or field
Parameters
==========
K : Represents the desired domain or field.
Alternatively, ``None`` may be passed, in which case this method
just returns a copy of this DomainMatrix.
Returns
=======
DomainMatrix
DomainMatrix with the desired domain or field
Examples
========
>>> from sympy import ZZ, ZZ_I
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.convert_to(ZZ_I)
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ_I)
"""
if K == self.domain:
return self.copy()
rep = self.rep
# The DFM, DDM and SDM types do not do any implicit conversions so we
# manage switching between DDM and DFM here.
if rep.is_DFM and not DFM._supports_domain(K):
rep_K = rep.to_ddm().convert_to(K)
elif rep.is_DDM and DFM._supports_domain(K):
rep_K = rep.convert_to(K).to_dfm()
else:
rep_K = rep.convert_to(K)
return self.from_rep(rep_K)
def to_sympy(self):
return self.convert_to(EXRAW)
def to_field(self):
r"""
Returns a DomainMatrix with the appropriate field
Returns
=======
DomainMatrix
DomainMatrix with the appropriate field
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_field()
DomainMatrix([[1, 2], [3, 4]], (2, 2), QQ)
"""
K = self.domain.get_field()
return self.convert_to(K)
def to_sparse(self):
"""
Return a sparse DomainMatrix representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> A.rep
[[1, 0], [0, 2]]
>>> B = A.to_sparse()
>>> B.rep
{0: {0: 1}, 1: {1: 2}}
"""
        if self.rep.fmt == 'sparse':
return self
return self.from_rep(self.rep.to_sdm())
def to_dense(self):
"""
Return a dense DomainMatrix representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix({0: {0: 1}, 1: {1: 2}}, (2, 2), QQ)
>>> A.rep
{0: {0: 1}, 1: {1: 2}}
>>> B = A.to_dense()
>>> B.rep
[[1, 0], [0, 2]]
"""
rep = self.rep
if rep.fmt == 'dense':
return self
return self.from_rep(rep.to_dfm_or_ddm())
def to_ddm(self):
"""
Return a :class:`~.DDM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix({0: {0: 1}, 1: {1: 2}}, (2, 2), QQ)
>>> ddm = A.to_ddm()
>>> ddm
[[1, 0], [0, 2]]
>>> type(ddm)
        <class 'sympy.polys.matrices.ddm.DDM'>
See Also
========
to_sdm
to_dense
sympy.polys.matrices.ddm.DDM.to_sdm
"""
return self.rep.to_ddm()
def to_sdm(self):
"""
Return a :class:`~.SDM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> sdm = A.to_sdm()
>>> sdm
{0: {0: 1}, 1: {1: 2}}
>>> type(sdm)
        <class 'sympy.polys.matrices.sdm.SDM'>
See Also
========
to_ddm
to_sparse
sympy.polys.matrices.sdm.SDM.to_ddm
"""
return self.rep.to_sdm()
@doctest_depends_on(ground_types=['flint'])
def to_dfm(self):
"""
Return a :class:`~.DFM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> dfm = A.to_dfm()
>>> dfm
[[1, 0], [0, 2]]
>>> type(dfm)
        <class 'sympy.polys.matrices._dfm.DFM'>
See Also
========
to_ddm
to_dense
DFM
"""
return self.rep.to_dfm()
@doctest_depends_on(ground_types=['flint'])
def to_dfm_or_ddm(self):
"""
Return a :class:`~.DFM` or :class:`~.DDM` representation of *self*.
Explanation
===========
The :class:`~.DFM` representation can only be used if the ground types
are ``flint`` and the ground domain is supported by ``python-flint``.
This method will return a :class:`~.DFM` representation if possible,
but will return a :class:`~.DDM` representation otherwise.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> dfm = A.to_dfm_or_ddm()
>>> dfm
[[1, 0], [0, 2]]
>>> type(dfm) # Depends on the ground domain and ground types
        <class 'sympy.polys.matrices._dfm.DFM'>
See Also
========
to_ddm: Always return a :class:`~.DDM` representation.
to_dfm: Returns a :class:`~.DFM` representation or raise an error.
to_dense: Convert internally to a :class:`~.DFM` or :class:`~.DDM`
DFM: The :class:`~.DFM` dense FLINT matrix representation.
DDM: The Python :class:`~.DDM` dense domain matrix representation.
"""
return self.rep.to_dfm_or_ddm()
@classmethod
def _unify_domain(cls, *matrices):
"""Convert matrices to a common domain"""
domains = {matrix.domain for matrix in matrices}
if len(domains) == 1:
return matrices
domain = reduce(lambda x, y: x.unify(y), domains)
return tuple(matrix.convert_to(domain) for matrix in matrices)
@classmethod
def _unify_fmt(cls, *matrices, fmt=None):
"""Convert matrices to the same format.
If all matrices have the same format, then return unmodified.
Otherwise convert both to the preferred format given as *fmt* which
        should be 'dense' or 'sparse'.
"""
formats = {matrix.rep.fmt for matrix in matrices}
if len(formats) == 1:
return matrices
        if fmt == 'sparse':
return tuple(matrix.to_sparse() for matrix in matrices)
elif fmt == 'dense':
return tuple(matrix.to_dense() for matrix in matrices)
else:
raise ValueError("fmt should be'sparse' or 'dense'")
def unify(self, *others, fmt=None):
"""
Unifies the domains and the format of self and other
matrices.
Parameters
==========
others : DomainMatrix
        fmt: string 'dense', 'sparse' or `None` (default)
            The preferred format to convert to if self and other are not
            already in the same format. If `None` or not specified then no
            conversion is performed.
Returns
=======
Tuple[DomainMatrix]
Matrices with unified domain and format
Examples
========
Unify the domain of DomainMatrix that have different domains:
>>> from sympy import ZZ, QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)]], (1, 2), ZZ)
>>> B = DomainMatrix([[QQ(1, 2), QQ(2)]], (1, 2), QQ)
>>> Aq, Bq = A.unify(B)
>>> Aq
DomainMatrix([[1, 2]], (1, 2), QQ)
>>> Bq
DomainMatrix([[1/2, 2]], (1, 2), QQ)
Unify the format (dense or sparse):
>>> A = DomainMatrix([[ZZ(1), ZZ(2)]], (1, 2), ZZ)
>>> B = DomainMatrix({0:{0: ZZ(1)}}, (2, 2), ZZ)
>>> B.rep
{0: {0: 1}}
>>> A2, B2 = A.unify(B, fmt='dense')
>>> B2.rep
[[1, 0], [0, 0]]
See Also
========
convert_to, to_dense, to_sparse
"""
matrices = (self,) + others
matrices = DomainMatrix._unify_domain(*matrices)
if fmt is not None:
matrices = DomainMatrix._unify_fmt(*matrices, fmt=fmt)
return matrices
def to_Matrix(self):
r"""
Convert DomainMatrix to Matrix
Returns
=======
Matrix
MutableDenseMatrix for the DomainMatrix
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_Matrix()
Matrix([
[1, 2],
[3, 4]])
See Also
========
from_Matrix
"""
from sympy.matrices.dense import MutableDenseMatrix
# XXX: If the internal representation of RepMatrix changes then this
# might need to be changed also.
if self.domain in (ZZ, QQ, EXRAW):
if self.rep.fmt == "sparse":
rep = self.copy()
else:
rep = self.to_sparse()
else:
rep = self.convert_to(EXRAW).to_sparse()
return MutableDenseMatrix._fromrep(rep)
def to_list(self):
"""
Convert :class:`DomainMatrix` to list of lists.
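        Examples
        ========
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DM
        >>> DM([[1, 2], [3, 4]], ZZ).to_list()
        [[1, 2], [3, 4]]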
See Also
========
from_list
to_list_flat
to_flat_nz
to_dok
"""
return self.rep.to_list()
def to_list_flat(self):
"""
Convert :class:`DomainMatrix` to flat list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_list_flat()
[1, 2, 3, 4]
See Also
========
from_list_flat
to_list
to_flat_nz
to_dok
"""
return self.rep.to_list_flat()
@classmethod
def from_list_flat(cls, elements, shape, domain):
"""
Create :class:`DomainMatrix` from flat list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> element_list = [ZZ(1), ZZ(2), ZZ(3), ZZ(4)]
>>> A = DomainMatrix.from_list_flat(element_list, (2, 2), ZZ)
>>> A
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
>>> A == A.from_list_flat(A.to_list_flat(), A.shape, A.domain)
True
See Also
========
to_list_flat
"""
ddm = DDM.from_list_flat(elements, shape, domain)
return cls.from_rep(ddm.to_dfm_or_ddm())
def to_flat_nz(self):
"""
Convert :class:`DomainMatrix` to list of nonzero elements and data.
Explanation
===========
Returns a tuple ``(elements, data)`` where ``elements`` is a list of
elements of the matrix with zeros possibly excluded. The matrix can be
reconstructed by passing these to :meth:`from_flat_nz`. The idea is to
be able to modify a flat list of the elements and then create a new
matrix of the same shape with the modified elements in the same
positions.
The format of ``data`` differs depending on whether the underlying
representation is dense or sparse but either way it represents the
positions of the elements in the list in a way that
:meth:`from_flat_nz` can use to reconstruct the matrix. The
:meth:`from_flat_nz` method should be called on the same
:class:`DomainMatrix` that was used to call :meth:`to_flat_nz`.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> elements, data = A.to_flat_nz()
>>> elements
[1, 2, 3, 4]
>>> A == A.from_flat_nz(elements, data, A.domain)
True
Create a matrix with the elements doubled:
>>> elements_doubled = [2*x for x in elements]
>>> A2 = A.from_flat_nz(elements_doubled, data, A.domain)
>>> A2 == 2*A
True
See Also
========
from_flat_nz
"""
return self.rep.to_flat_nz()
def from_flat_nz(self, elements, data, domain):
"""
Reconstruct :class:`DomainMatrix` after calling :meth:`to_flat_nz`.
See :meth:`to_flat_nz` for explanation.
See Also
========
to_flat_nz
"""
rep = self.rep.from_flat_nz(elements, data, domain)
return self.from_rep(rep)
def to_dok(self):
"""
Convert :class:`DomainMatrix` to dictionary of keys (dok) format.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(0)],
... [ZZ(0), ZZ(4)]], (2, 2), ZZ)
>>> A.to_dok()
{(0, 0): 1, (1, 1): 4}
The matrix can be reconstructed by calling :meth:`from_dok` although
the reconstructed matrix will always be in sparse format:
>>> A.to_sparse() == A.from_dok(A.to_dok(), A.shape, A.domain)
True
See Also
========
from_dok
to_list
to_list_flat
to_flat_nz
"""
return self.rep.to_dok()
@classmethod
def from_dok(cls, dok, shape, domain):
"""
Create :class:`DomainMatrix` from dictionary of keys (dok) format.
See :meth:`to_dok` for explanation.
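        Examples
        ========
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DomainMatrix
        >>> dok = {(0, 0): ZZ(1), (1, 1): ZZ(4)}
        >>> DomainMatrix.from_dok(dok, (2, 2), ZZ)
        DomainMatrix({0: {0: 1}, 1: {1: 4}}, (2, 2), ZZ)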
See Also
========
to_dok
"""
return cls.from_rep(SDM.from_dok(dok, shape, domain))
def nnz(self):
"""
Number of nonzero elements in the matrix.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[1, 0], [0, 4]], ZZ)
>>> A.nnz()
2
"""
return self.rep.nnz()
def __repr__(self):
return 'DomainMatrix(%s, %r, %r)' % (str(self.rep), self.shape, self.domain)
def transpose(self):
"""Matrix transpose of ``self``"""
return self.from_rep(self.rep.transpose())
def flat(self):
rows, cols = self.shape
return [self[i,j].element for i in range(rows) for j in range(cols)]
@property
def is_zero_matrix(self):
return self.rep.is_zero_matrix()
@property
def is_upper(self):
"""
Says whether this matrix is upper-triangular. True can be returned
even if the matrix is not square.
"""
return self.rep.is_upper()
@property
def is_lower(self):
"""
Says whether this matrix is lower-triangular. True can be returned
even if the matrix is not square.
"""
return self.rep.is_lower()
@property
def is_diagonal(self):
"""
True if the matrix is diagonal.
Can return true for non-square matrices. A matrix is diagonal if
        ``M[i,j] == 0`` whenever ``i != j``.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[ZZ(1), ZZ(0)], [ZZ(0), ZZ(1)]], ZZ)
>>> M.is_diagonal
True
See Also
========
is_upper
is_lower
is_square
diagonal
"""
return self.rep.is_diagonal()
def diagonal(self):
"""
Get the diagonal entries of the matrix as a list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> M.diagonal()
[1, 4]
See Also
========
is_diagonal
diag
"""
return self.rep.diagonal()
@property
def is_square(self):
"""
True if the matrix is square.
"""
return self.shape[0] == self.shape[1]
def rank(self):
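        """Rank of the matrix: the number of pivots returned by :meth:`rref`.
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DM
        >>> DM([[1, 2], [2, 4]], ZZ).rank()
        1
        """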
rref, pivots = self.rref()
return len(pivots)
def hstack(A, *B):
r"""Horizontally stack the given matrices.
Parameters
==========
B: DomainMatrix
Matrices to stack horizontally.
Returns
=======
DomainMatrix
DomainMatrix by stacking horizontally.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([[ZZ(5), ZZ(6)], [ZZ(7), ZZ(8)]], (2, 2), ZZ)
>>> A.hstack(B)
DomainMatrix([[1, 2, 5, 6], [3, 4, 7, 8]], (2, 4), ZZ)
>>> C = DomainMatrix([[ZZ(9), ZZ(10)], [ZZ(11), ZZ(12)]], (2, 2), ZZ)
>>> A.hstack(B, C)
DomainMatrix([[1, 2, 5, 6, 9, 10], [3, 4, 7, 8, 11, 12]], (2, 6), ZZ)
See Also
========
unify
"""
A, *B = A.unify(*B, fmt=A.rep.fmt)
return DomainMatrix.from_rep(A.rep.hstack(*(Bk.rep for Bk in B)))
def vstack(A, *B):
r"""Vertically stack the given matrices.
Parameters
==========
B: DomainMatrix
Matrices to stack vertically.
Returns
=======
DomainMatrix
DomainMatrix by stacking vertically.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([[ZZ(5), ZZ(6)], [ZZ(7), ZZ(8)]], (2, 2), ZZ)
>>> A.vstack(B)
DomainMatrix([[1, 2], [3, 4], [5, 6], [7, 8]], (4, 2), ZZ)
>>> C = DomainMatrix([[ZZ(9), ZZ(10)], [ZZ(11), ZZ(12)]], (2, 2), ZZ)
>>> A.vstack(B, C)
DomainMatrix([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]], (6, 2), ZZ)
See Also
========
unify
"""
A, *B = A.unify(*B, fmt='dense')
return DomainMatrix.from_rep(A.rep.vstack(*(Bk.rep for Bk in B)))
def applyfunc(self, func, domain=None):
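        """Apply ``func`` to every element, optionally converting to ``domain``.
        A brief sketch (the lambda here is only illustrative):
        >>> from sympy import ZZ
        >>> from sympy.polys.matrices import DM
        >>> A = DM([[1, 2], [3, 4]], ZZ)
        >>> A.applyfunc(lambda e: e**2)
        DomainMatrix([[1, 4], [9, 16]], (2, 2), ZZ)
        """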
if domain is None:
domain = self.domain
return self.from_rep(self.rep.applyfunc(func, domain))
def __add__(A, B):
if not isinstance(B, DomainMatrix):
return NotImplemented
A, B = A.unify(B, fmt='dense')
return A.add(B)
def __sub__(A, B):
if not isinstance(B, DomainMatrix):
return NotImplemented
A, B = A.unify(B, fmt='dense')
return A.sub(B)
def __neg__(A):
return A.neg()
def __mul__(A, B):
"""A * B"""
if isinstance(B, DomainMatrix):
A, B = A.unify(B, fmt='dense')
return A.matmul(B)
elif B in A.domain:
return A.scalarmul(B)
elif isinstance(B, DomainScalar):
A, B = A.unify(B)
return A.scalarmul(B.element)
else:
return NotImplemented
def __rmul__(A, B):
if B in A.domain:
return A.rscalarmul(B)
elif isinstance(B, DomainScalar):
A, B = A.unify(B)
return A.rscalarmul(B.element)
else:
return NotImplemented
def __pow__(A, n):
"""A ** n"""
if not isinstance(n, int):
return NotImplemented
return A.pow(n)
def _check(a, op, b, ashape, bshape):
        if a.domain != b.domain:
msg = "Domain mismatch: %s %s %s" % (a.domain, op, b.domain)
raise DMDomainError(msg)
        if ashape != bshape:
msg = "Shape mismatch: %s %s %s" % (a.shape, op, b.shape)
raise DMShapeError(msg)
        if a.rep.fmt != b.rep.fmt:
msg = "Format mismatch: %s %s %s" % (a.rep.fmt, op, b.rep.fmt)
raise DMFormatError(msg)
        if type(a.rep) != type(b.rep):
msg = "Type mismatch: %s %s %s" % (type(a.rep), op, type(b.rep))
raise DMFormatError(msg)
def add(A, B):
r"""
Adds two DomainMatrix matrices of the same Domain
Parameters
==========
A, B: DomainMatrix
matrices to add
Returns
=======
DomainMatrix
DomainMatrix after Addition
Raises
======
DMShapeError
If the dimensions of the two DomainMatrix are not equal
ValueError
If the domain of the two DomainMatrix are not same
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(4), ZZ(3)],
... [ZZ(2), ZZ(1)]], (2, 2), ZZ)
>>> A.add(B)
DomainMatrix([[5, 5], [5, 5]], (2, 2), ZZ)
See Also
========
sub, matmul
"""
A._check('+', B, A.shape, B.shape)
return A.from_rep(A.rep.add(B.rep))
def sub(A, B):
r"""
Subtracts two DomainMatrix matrices of the same Domain
Parameters
==========
A, B: DomainMatrix
matrices to subtract
Returns
=======
DomainMatrix
DomainMatrix after Subtraction
Raises
======
DMShapeError
If the dimensions of the two DomainMatrix are not equal
ValueError
If the domain of the two DomainMatrix are not same
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(4), ZZ(3)],
... [ZZ(2), ZZ(1)]], (2, 2), ZZ)
>>> A.sub(B)
DomainMatrix([[-3, -1], [1, 3]], (2, 2), ZZ)
See Also
========
add, matmul
"""
A._check('-', B, A.shape, B.shape)
return A.from_rep(A.rep.sub(B.rep))
def neg(A):
r"""
Returns the negative of DomainMatrix
Parameters
==========
A : Represents a DomainMatrix
Returns
=======
DomainMatrix
DomainMatrix after Negation
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.neg()
DomainMatrix([[-1, -2], [-3, -4]], (2, 2), ZZ)
"""
return A.from_rep(A.rep.neg())
def mul(A, b):
        r"""
        Multiply every element of this matrix by the scalar ``b``.
        """
        return A.from_rep(A.rep.mul(b))
# PLGP SOFTWARE TOOL
# Ludmila Furtkevicova, part of a master's thesis
# script: window with five tabs, functions, buttons, models
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import os
import tkMessageBox
import Tkinter
import ttk
from Tkinter import *
from ttk import *
import sys
import subprocess
import ScrolledText
import tkFileDialog
from tkFileDialog import askdirectory, asksaveasfile
# class for working with the GRASS GIS program
class GRASS:
def __init__(self):
        # start GRASS GIS
grass7bin_win = r'C:\Program Files (x86)\GRASS GIS 7.0.0\grass70.bat'
        # define the GRASS DATABASE (GRASS GIS database) directory
        # cestaL comes from the GUI
self.gisdb = "C:\\DP_LF"
        # SOFTWARE
grass7bin = grass7bin_win
        # GRASS 7 and GISBASE
startcmd = [grass7bin, '--config', 'path']
p = subprocess.Popen(startcmd, shell=False,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
        if p.returncode != 0:
print >>sys.stderr, "ERROR: Cannot find GRASS GIS 7 start script (%s)" % startcmd
sys.exit(-1)
self.gisbase = out.strip('\n\r')
        # the GISBASE and PATH variables
os.environ['GISBASE'] = self.gisbase
os.environ['PATH'] += os.pathsep + os.path.join(self.gisbase, 'extrabin')
        # define the GRASS-Python environment
gpydir = os.path.join(self.gisbase, "etc", "python")
sys.path.append(gpydir)
os.environ['GISDBASE'] = self.gisdb
# class handling redirection (window content into a specific file)
class Presmerovanie(object):
def __init__(self, text_ctrl):
self.output = text_ctrl
def write(self, string):
self.output.insert(Tkinter.END, string)
# class for the user interface
class GUI(Tkinter.Frame):
Gobj = GRASS()
cestaV = ""
cestaI = ""
cestaL = ""
recl1 = "recl1"
cesta = "C:\\DP_LF\\vypocet\\"
# GUI
def __init__(self,gui):
Tkinter.Frame.__init__(self, gui)
self.gui = gui
self.gui.title(u"PLGP (Ludmila Furtkevicova, 2015) ")
note = Notebook(self.gui)
        # five tabs
tab1 = Tkinter.Frame(note)
tab2 = Tkinter.Frame(note)
tab3 = Tkinter.Frame(note)
tab4 = Tkinter.Frame(note)
tab5 = Tkinter.Frame(note)
        # style settings for the tabs
ttk.Style().configure('TLabelframe.Label', foreground='forest green',font="Verdana 8 bold")
ttk.Style().configure('TButton', foreground='cadet blue',font="Helvetica 8 bold")
ttk.Style().configure("TNotebook.Tab", foreground="dim gray",font="Helvetica 8 bold")
        # tab labels
note.add(tab1, text = " 1. Settings ")
note.add(tab2, text = " 2. Parametric maps ")
note.add(tab3, text = " 3. Weight calculation ")
note.add(tab4, text = " 4. Prediction ")
note.add(tab5, text = " 5. Validation ")
note.pack()
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~ FIRST TAB ~~~~~~~~~~~~~~~~~~~~~~~~~
        # set the paths and create the files needed by the following tabs
ttk.Label(tab1, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab1,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab1, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab1,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
        # first sub-panel of the first tab (input data)
self.one = ttk.Labelframe(tab1, text = " 1. Input data: ")
self.one.grid(row=1, column=0, columnspan=2, sticky='S', padx=5, pady=5, ipadx=4,\
ipady=1)
L21 = ttk.Label(self.one, text=" Path to folder with vector data: ")
L21.grid(row=2, column=0, sticky='W', pady=5)
self.E21 = ttk.Entry(self.one, width=40)
self.E21.grid(row=2, column=1, columnspan=2, sticky="WE", pady=5, padx = 5)
B21 = ttk.Button(self.one, text=" Browse...",command = self.openV)
B21.grid(row=2, column=3, sticky='W',pady=5, padx = 2)
        # second sub-panel of the first tab (location name, EPSG code, ...)
self.two = ttk.Labelframe(tab1, text = " 2. New LOCATION and new MAPSETs:\n ")
self.two.grid(row=3, column=0, columnspan=2, sticky='S', padx=5, pady=5, ipadx=4,\
ipady=5)
L10 = ttk.Label(self.two, text=" LOCATION name: ")
L10.grid(row=4, column=0, sticky='W', padx=5, pady = 5)
self.E10 = ttk.Entry(self.two, width=30)
self.E10.grid(row=4, column=1, columnspan=2, sticky="WE", pady=2)
self.E10.insert(1,"Mapy")
self.nameL = self.E10.get()
L11 = ttk.Label(self.two, text=" EPSG code:")
L11.grid(row=5, column=0, sticky='W', padx=5, pady=2)
self.E11 = ttk.Entry(self.two, width=7)
self.E11.grid(row=5, column=1, columnspan=2, sticky="WE", pady=2)
self.E11.insert(1,"2065")
self.epsg = self.E11.get()
L12 = ttk.Label(self.two, text=" Path for new LOCATION:")
L12.grid(row=6, column=0, sticky='W', padx=5, pady=2)
self.E12 = ttk.Entry(self.two, width=10)
self.E12.grid(row=6, column=1, columnspan=2, sticky="WE", pady=2)
B12 = ttk.Button(self.two, text=" Browse...",command = self.openL)
B12.grid(row=6, column=3, sticky='W', padx=5, pady=2)
L13 = ttk.Label(self.two, text=" Name of MAPSET for input data: ")
L13.grid(row=7, column=0, sticky='W', padx=5, pady=2)
self.E13 = ttk.Entry(self.two, width=10)
self.E13.grid(row=7, column=1, columnspan=2, sticky="WE", pady=2)
self.E13.insert(1,"VSTUP")
self.nameMV = self.E13.get()
L14 = ttk.Label(self.two, text=" Name of MAPSET for intermediate data: ")
L14.grid(row=8, column=0, sticky='W', padx=5, pady=2)
self.E14 = ttk.Entry(self.two, width=10)
self.E14.grid(row=8, column=1, columnspan=2, sticky="WE", pady=2)
self.E14.insert(1,"PM")
self.nameMM = self.E14.get()
L15 = ttk.Label(self.two, text=" Name of MAPSET for results: ")
L15.grid(row=9, column=0, sticky='W', padx=5, pady=2)
self.E15 = ttk.Entry(self.two, width=10)
self.E15.grid(row=9, column=1, columnspan=2, sticky="WE", pady=2)
self.E15.insert(1,"PREDIKCIA")
self.nameM = self.E15.get()
        # third sub-panel of the first tab (results)
self.three = ttk.Labelframe(tab1, text = " 3. Reports, reclassification rules, information about calculation:\n ")
self.three.grid(row=10, column=0, columnspan=2, sticky='S', padx=5, pady=1, ipadx=5,\
ipady=5)
L31 = ttk.Label(self.three, text=" Path to folder for results: ")
L31.grid(row=11, column=0, sticky='WE', padx=5, pady=2)
self.E31 = ttk.Entry(self.three, width=39)
self.E31.grid(row=11, column=1, columnspan=2, sticky="WE", pady=2)
B31 = ttk.Button(self.three, text="Browse...",command = self.openI)
B31.grid(row=11, column=3, sticky='W', padx=5, pady=2)
        # REFRESH button to clear the default input values
ttk.Button(tab1, text="REFRESH",command=self.refreshALL).grid(row=13, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
        # button to quit the application
ttk.Button(tab1, text="QUIT",command=self.gEND).grid(row=13, column=1, \
sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
        # button to read the filled-in fields and create the necessary files
ttk.Button(tab1, text="NEXT", command=lambda: self.valueGET(self.E10.get(),\
self.E11.get(), self.E13.get(), self.E14.get(),\
self.E15.get())).grid(row=14, column=0, \
sticky='WE', padx=5, columnspan=2, rowspan=1,pady=5)
        # button serving as a HELP alternative
ttk.Button(tab1, text='INFO',command=tkMessageBox.showinfo).grid(row=12, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
        # button to launch the GRASS GIS environment
ttk.Button(tab1, text='RUN GRASS GIS',command=self.RG).grid(row=12, column=1,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECOND TAB ~~~~~~~~~~~~~~~~~~~~~~
        # show the content of the PERMANENT mapset, create parametric maps
        # show information about the maps and their first reclassification
ttk.Label(tab2, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab2,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab2, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab2,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
        # first sub-panel of the second tab: list the maps in the mapset
self.four = ttk.Labelframe(tab2, text = " 4. MAPSET content: " )
self.four.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf1 = ScrolledText.ScrolledText(self.four, height = 5, width = 61)
self.txf1.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab2, text='VIEW CONTENT',command = self.wrZM).grid(row=2,\
column=1, sticky='E', padx=10, pady=5,columnspan=1, rowspan=1)
        # second sub-panel of the second tab: info about the parametric maps
self.five = ttk.Labelframe(tab2, text = " 5. Information in TXT file: " )
self.five.grid(row=3, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf3 = ScrolledText.ScrolledText(self.five, height = 9, width = 61)
self.txf3.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab2, text='INFO',command=self.showexample).grid(row=7, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text='RUN GRASS GIS',command=self.RG).grid(row=8, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text='NEXT', command = self.II).grid(row=9, column=1,sticky='WE', padx=5,\
pady=5,columnspan=1, rowspan=1)
        # button that saves the window content into a specific file
ttk.Button(tab2, text='SAVE AS',command=self.edit_save).grid(row=8, column=1,sticky='WE', padx=5,\
pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text="QUIT", command=self.gEND).grid(row=9, column=0,sticky='WE',\
padx=5, columnspan=1, rowspan=10,pady=5)
        # button that loads a specific txt file into the PLGP window
ttk.Button(tab2, text='LOAD TXT', command = self.open_file).grid(row=7,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
        # ~~~~~~~~~~~~~~~~~~~~~~~~ THIRD TAB ~~~~~~~~~~~~~~~~~~~~~~~
        # show the calculated weights and other information, show the equation for Y
ttk.Label(tab3, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab3,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab3, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab3,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.six = ttk.Labelframe(tab3, text = " 6. Information about calculated weights of all factors : " )
self.six.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf2 = ScrolledText.ScrolledText(self.six, height = 12, width = 61)
self.txf2.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
self.seven = ttk.Labelframe(tab3, text = " 7. The equation to calculate value Y : " )
self.seven.grid(row=3, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf4 = ScrolledText.ScrolledText(self.seven, height = 3.5, width = 61)
self.txf4.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab3, text='INFO',command=tkMessageBox.showinfo).grid(row=7, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text='RUN GRASS GIS',command=self.RG).grid(row=8, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text='NEXT', command = self.III).grid(row=9, column=1,\
sticky='WE', padx=5,pady=5,columnspan=1, rowspan=1)
        # show the equation for Y
ttk.Button(tab3, text='EQUATION',command = self.WrRovnica).grid(row=8, column=1,\
sticky='WE', padx=5,pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text="QUIT", command=self.gEND).grid(row=9, column=0,\
sticky='WE',padx=5, columnspan=1, rowspan=1,pady=5)
        # weight calculation
ttk.Button(tab3, text='CALCULATE WEIGHTS', command=self.CalculateFactors).grid(row=7,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
        # ~~~~~~~~~~~~~~~~~~~~~~~~~ FOURTH TAB ~~~~~~~~~~~~~~~~~~~~~~~~~
        # show the MIN and MAX cell values of the Y raster
        # reclassification of the continuous interval
ttk.Label(tab4, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab4,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab4, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab4,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.eight = ttk.Labelframe(tab4, text = " 8. The result of equation: " )
self.eight.grid(row=1, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf5 = ScrolledText.ScrolledText(self.eight, height = 5, width = 62)
self.txf5.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
self.eightt = ttk.Labelframe(tab4, text = " is raster map with MIN and MAX value:" )
self.eightt.grid(row=3, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf6 = ScrolledText.ScrolledText(self.eightt, height = 3, width = 62)
self.txf6.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab4, text='INFO',command=tkMessageBox.showinfo).grid(row=4,\
column=0,sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab4, text='MIN MAX',command = self.open_filey).grid(row=4,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
self.nine = ttk.Labelframe(tab4, text = " 9. Reclassification rules for result map: " )
self.nine.grid(row=5, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf7 = ScrolledText.ScrolledText(self.nine, height = 5.3, width = 62)
self.txf7.grid(row=6, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab4, text='SAVE AS',command=self.edit_savey).grid(row=6, column=1,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab4, text='NEXT', command = self.IV).grid(row=7,\
column=1,sticky='WE',padx=5, columnspan=1, rowspan=1,pady=5)
ttk.Button(tab4,text='RUN GRASS GIS',command=self.RG ).grid(row=6, column=0,sticky='WE',\
padx=5, pady = 5, columnspan=1, rowspan=1)
ttk.Button(tab4, text="QUIT", command=self.gEND).grid(row=7, column=0,sticky='WE',\
padx=5, columnspan=1, rowspan=10,pady=5)
        # ~~~~~~~~~~~~~~~~~~~~~~~~~~~ FIFTH TAB ~~~~~~~~~~~~~~~~~~~~~~~~
        # validation of the results
        # COV1, COV2 and COV3 refer to cutoff values, i.e. the threshold that
        # splits the Y interval into two categories: stable and unstable areas
        # in the thesis there is only one threshold (COV1, 2 and 3 are equal),
        # which is why the ROC curve is angular: plot.png
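        # A minimal sketch (hypothetical values) of the r.recode rules that a
        # single cutoff value COV produces for the continuous Y raster:
        #   MIN:COV:1    -> category 1, stable areas
        #   COV:MAX:2    -> category 2, unstable areas
        # With only one interior threshold the ROC has a single corner point.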
ttk.Label(tab5, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab5,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab5, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab5,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.ten = ttk.Labelframe(tab5, text = " 10. Validation")
self.ten.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
        # show the intervals; reclassification rules for splitting the result
        # into categories: stable and unstable areas
self.tenL = ttk.Label(self.ten,text=" Intervals according to set cutoff value:",foreground="cadet blue")
self.tenL.grid(row=2, column = 0, columnspan=2, sticky='W', padx=1, pady=1)
self.txf8 = ScrolledText.ScrolledText(self.ten, height = 8, width = 30)
self.txf8.grid(row=3, column=0,columnspan=2, rowspan=1, sticky='NS', padx=5, pady=5)
self.tenL = ttk.Label(self.ten,text=" Receiver operating characteristic :",foreground="cadet blue")
self.tenL.grid(row=2, column = 2, columnspan=2, sticky='W', padx=1, pady=5)
self.txf9 = ScrolledText.ScrolledText(self.ten, height = 17, width = 27)
self.txf9.grid(row=3, column=2,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
        # show the area of the categories: stable and unstable areas
self.tenL = ttk.Label(self.ten,text=" Area according to set cutoff value:",foreground="cadet blue")
self.tenL.grid(row=4, column = 0, columnspan=2, sticky='W', padx=1, pady=5)
self.txf10 = ScrolledText.ScrolledText(self.ten, height = 6, width = 30)
self.txf10.grid(row=5, column=0,columnspan=2, rowspan=1, sticky='NS', padx=5, pady=5)
        # show the values used to compute the area under the ROC curve
ttk.Button(tab5, text="SHOW VALUES ", command = self.valid).grid(row=7,\
column=0,sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab5, text='RUN GRASS GIS',command=self.RG).grid(row=8,\
column=0,sticky="WE",padx=5, pady=5,columnspan=1, rowspan=1)
        # show a preliminary result: without legend, existing landslides, ...
ttk.Button(tab5, text="SHOW MAP",command = self.showimg).grid(row=8, column=1,sticky='WE',\
padx=5, pady=5,columnspan=1, rowspan=1)
        # show the ROC curve
ttk.Button(tab5, text="SHOW ROC", command = self.showROC).grid(row=7, column=1,sticky='WE',\
padx=5,pady=5,columnspan=1, rowspan=1)
ttk.Button(tab5, text="QUIT", command=self.gEND).grid(row=9,\
column=0,sticky='WE',\
padx=5, columnspan=2, rowspan=1,pady=5)
    # function showing an example of how the reclassification rules should look
    # when using the r.recode module to reclassify FLOAT maps
def showexample(self):
tkMessageBox.showinfo("recl_file", "\nText file for reclassification:\n\n\
MIN : ? : ?\n ? : ? : ?\n ? : ? : ?\n ... \n \n ? : ? : ?\n ? : MAX : ? ")
    # functions showing a dialog about continuing with the next tab
def II(self):
tkMessageBox.showinfo("GO NEXT"," Continue with third tab... ")
def III(self):
tkMessageBox.showinfo("GO NEXT"," Continue with fourth tab... ")
def IV(self):
tkMessageBox.showinfo("GO NEXT"," Continue with fifth tab... ")
    # function to launch GRASS GIS
def RG(self):
try:
os.startfile(r'C:\Program Files (x86)\GRASS GIS 7.0.0\grass70.bat')
except:
tkMessageBox.showwarning(""," Cannot run GRASS GIS. ")
    # function to get the PATH of the main folder
def openL(self):
self.E12.delete(0,"end")
        # DEFAULT PATH
pr = askdirectory(initialdir="C:\\DP_LF")
self.cestaL = os.path.abspath(pr)
self.E12.insert(0, self.cestaL)
self.cestaL = self.cestaL.encode("ascii","ignore")
return self.cestaL
    # function to get the PATH where the input data are stored
def openV(self):
self.E21.delete(0,"end")
        # DEFAULT PATH
priecinok = askdirectory(initialdir="C:\\DP_LF\\data")
self.cestaV = os.path.abspath(priecinok)
self.E21.insert(0, self.cestaV)
self.cestaV = self.cestaV.encode("ascii","ignore")
return self.cestaV
    # function to get the PATH where INFO about the calculation will be stored
def openI(self):
self.E31.delete(0,"end")
        # DEFAULT PATH
priecinok = askdirectory(initialdir="C:\\DP_LF\\vypocet")
self.cestaI = os.path.abspath(priecinok)
self.E31.insert(0, self.cestaI)
self.cestaI = self.cestaI.encode("ascii","ignore")
return self.cestaI
    # function executed after pressing NEXT on the first tab
    # reads the filled-in fields of the first tab
def valueGET(self,a,b,c,d,e):
self.createL()
self.nameL = str(a)
self.epsg = str(b)
self.nameMV = str(c)
self.nameMM = str(d)
self.nameM = str(e)
try:
self.epsg=int(self.epsg)
except:
tkMessageBox.showerror( ""," EPSG code must be numeric! " )
self.gui.destroy()
self.epsg=str(self.epsg)
        if ((self.nameL != "") and (self.epsg != "") and (self.nameMV != "")\
            and (self.nameMM != "") and (self.nameM != "") and (self.cestaL != "")\
            and (self.cestaV != "") and (self.cestaI != "")):
if tkMessageBox.askquestion("Settings", " New LOCATION, new MAPSETs and other\n\
necessary folders and *.txt files will be created.\n\
All existing files with the same name will be \n\
deleted.\n\n Do you really want to continue?")=="yes":
                # create new folders
nf_info = self.cestaI+"\\info"
if not os.path.isdir(nf_info):
os.makedirs(nf_info)
nf_recl1 = self.cestaI+"\\recl1" #robim new folder
if not os.path.isdir(nf_recl1):
os.makedirs(nf_recl1)
nf_report = self.cestaI+"\\report" #robim new folder
if not os.path.isdir(nf_report):
os.makedirs(nf_report)
nf_recl2 = self.cestaI+"\\recl2" #robim new folder
if not os.path.isdir(nf_recl2):
os.makedirs(nf_recl2)
                # create the txt files for the first reclassification
                # (each file is opened and closed just to create it empty)
                r1_G = nf_recl1+"\\recl1_G.txt"
                open(r1_G, 'w').close()
                r1_DMR = nf_recl1+"\\recl1_DMR.txt"
                open(r1_DMR, 'w').close()
                r1_S = nf_recl1+"\\recl1_S.txt"
                open(r1_S, 'w').close()
                r1_E = nf_recl1+"\\recl1_E.txt"
                open(r1_E, 'w').close()
                r1_DS = nf_recl1+"\\recl1_DS.txt"
                open(r1_DS, 'w').close()
                r1_M = nf_recl1+"\\recl1_M.txt"
                open(r1_M, 'w').close()
                r1_K = nf_recl1+"\\recl1_K.txt"
                open(r1_K, 'w').close()
                r1_VK = nf_recl1+"\\recl1_VK.txt"
                open(r1_VK, 'w').close()
                # create the other required txt files
                open(self.cesta + "recl_y.txt","wb").close()
                open(self.cesta + "recl_COV1.txt","wb").close()
                open(self.cesta + "recl_COV2.txt","wb").close()
                open(self.cesta + "recl_COV3.txt","wb").close()
tkMessageBox.showinfo("New folders", " In %s these folders have already been created:\
\n 1. info - information about parametric maps\
\n 2. recl1 - necessary rules for first reclassification\
\n 3. report - information about classes: areas\
\n 4. recl2 - necessary rules for second reclassification\n"\
%self.cestaI)
tkMessageBox.showinfo("First reclassification", " In %s these *.txt files have already been created:\n\
\n 1. recl1_G.txt - geology factor\
\n 2. recl1_DMR.txt - DEM factor\
\n 3. recl1_S.txt - slope factor\
\n 4. recl1_E.txt - aspect factor\
\n 5. recl1_DS.txt - flowlength factor\
\n 6. recl1_M.txt - accumulation factor\
\n 7. recl1_K.txt - curvature factor\
\n 8. recl1_VK.txt - landuse factor\n" %nf_recl1)
tkMessageBox.showinfo("GO NEXT"," Continue with second tab... ")
else:
self.gui.destroy()
else:
tkMessageBox.showerror("", " ERROR \n\n Check the input values!" )
return self.cestaL
    # function to clear the default-filled fields
def refreshALL(self):
self.E10.delete(0,"end")
self.E11.delete(0,"end")
self.E12.delete(0,"end")
self.E13.delete(0,"end")
self.E14.delete(0,"end")
self.E15.delete(0,"end")
self.E21.delete(0,"end")
self.E31.delete(0,"end")
    # function to quit PLGP
def gEND(self):
if tkMessageBox.askyesno('Verification',' Do you really want to quit? '):
self.gui.destroy()
else:
tkMessageBox.askretrycancel("No",' Press ENTER to continue ')
def wrZM(self):
        # clear the content and print mapsets, rasters and vectors into the txf1 window
self.txf1.delete(1.0, END)
redir = Presmerovanie(self.txf1)
sys.stdout = redir
self.zm()
self.zistiR()
self.zistiV()
# self.txf1.insert(INSERT,"Existujuce rastrove mapy:\n\nExistujuce vektorove mapy:")
# print(self.txf1.get(1.0, END))
def delZM(self):
self.txf1.delete(1.0, END)
def open_file(self):
        # ensures the txt content is displayed in the window
self.txf3.delete(1.0, END)
redir = Presmerovanie(self.txf3)
sys.stdout = redir
self.txf3.delete(1.0, END)
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['initialdir'] = "C:\\DP_LF\\vypocet\\info"
options['parent'] = self.gui
options['title'] = "Open a file"
        # reads only the MIN and MAX cell value information from the INFO txt file
with tkFileDialog.askopenfile(mode='r', initialdir = "C:\\DP_LF\\vypocet\\info") as f_handle:
pr = os.path.curdir
self.oo = os.path.abspath(pr)
self.oo = self.oo.encode("ascii","ignore")
            #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print the file name
print "Map:"
print "---------------------------------------"
print "MIN and MAX cell value in raster of selected factor :\n"
            # print the file content
for line in f_handle:
line = line.strip()
if line == "": continue
if "max" in line:
print line
if "min" in line:
print line
    # save the txt file as...
def edit_savey(self):
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['parent'] = self.gui
options['title'] = "Save as..."
f = asksaveasfile(mode='w+', defaultextension=".txt", initialdir = "C:\\DP_LF\\vypocet")
if not f:
return
f.write(self.txf7.get(1.0, END))
f.close()
    # open the INFO txt file
def open_filey(self):
        # ensures the txt content is displayed in the window
self.txf6.delete(1.0, END)
redir = Presmerovanie(self.txf6)
sys.stdout = redir
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['initialdir'] = "C:\\DP_LF\\vypocet"
options['parent'] = self.gui
options['title'] = "Open a file"
f_handle = "C:\\DP_LF\\vypocet\\info_y.txt"
file = open(f_handle, 'r')
        # print the file content
for line in file:
line = line.strip()
if line == "": continue
if "max" in line:
print line
if "min" in line:
print line
def edit_save(self):
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['parent'] = self.gui
options['title'] = "Save as..."
f = asksaveasfile(mode='w+', defaultextension=".txt", initialdir = "C:\\DP_LF\\vypocet\\recl1")
if not f:
return
f.write(self.txf3.get(1.0, END))
f.close()
    # create the LOCATION
def createL(self):
import grass.script as gscript
import grass.script.setup as gsetup
import grass.script.core as gcore
cestaL = self.Gobj.gisdb
nameL = self.nameL
epsg = self.epsg
mapset = self.nameMV
mapset1 = self.nameMM
mapset2 = self.nameM
gisbase = self.Gobj.gisbase
gsetup.init(gisbase, cestaL, nameL, "PERMANENT")
        # create the LOCATION
gcore.create_location(cestaL, nameL, epsg=epsg, proj4=None, filename=None, wkt=None,\
datum=None, datum_trans=None, desc=None, overwrite=True)
        # create the MAPSETs
gscript.run_command("g.mapset",overwrite = True,mapset = mapset, flags="c")
gscript.run_command("g.mapset",overwrite = True,mapset = mapset1, flags="c")
gscript.run_command("g.mapset",overwrite = True,mapset = mapset2, flags="c")
    # prints the list of mapsets in the location
def zm(self):
import grass.script as gscript
print "MAPSETs:"
print gscript.read_command("g.mapsets",flags = "l")
    # prints the list of raster maps
def zistiR(self):
import grass.script as gscript
print "Raster maps:"
for rast in gscript.list_strings(type = 'rast'):
print rast,
    # prints the list of vector maps
def zistiV(self):
import grass.script as gscript
print "\nVector maps:"
for vect in gscript.list_strings(type ='vect'):
print vect,
    # calculates the weight of a particular factor
def Faktor(self, faktor):
import math
import scipy
        # function to save the reclassification rules for the second reclassification
def STL(a,b,c):
ctxt = self.cesta + "recl2\\" + "recl2_" + str(c) + ".txt"
file = open(ctxt, 'w+')
for j,k in zip(a, b):
file.writelines("%r = %r\n" % (j,k))
file.close()
        # function to read the content of the reports
def Report(self,F):
import csv
tf = open(F, "rb")
lines = tf.readlines()
lines1 = lines[4:(len(lines)-3)]
data = csv.reader(lines1, delimiter="|")
table = [row for row in data]
            # collect class numbers (column 1) and area values (column 3)
            self.recl1 = []
            self.P = []
            for row in table:
                self.recl1.append(row[1])
                self.P.append(row[3])
self.recl1 = [int(i) for i in self.recl1]
self.P = [float(i) for i in self.P]
STL(self.recl1, self.P, faktor)
return (self.recl1,self.P)
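        # NOTE: the report files are assumed to follow GRASS r.report's
        # pipe-separated layout; in a hypothetical data row such as
        # "|1|label|12.50|", csv.reader with delimiter "|" yields the class
        # number in row[1] and the area value in row[3].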
f1 = "report_"
f2 = str(faktor)
f3 = ".txt"
f4 = "_z.txt"
Ft = self.cesta+"report\\"+f1+f2+f3
Ftz = self.cesta+"report\\"+f1+f2+f4
        # area of each class
pt = Report(self, Ft)
Pt = pt[1]
recl1t = pt[0]
        # landslide area within each class
ptz = Report(self, Ftz)
Ptz = ptz[1]
recl1tz = ptz[0]
        # number of classes of the parametric map
        s = len(recl1t)
"""
Class: Stat232C
Project 3: Goal Inference
Name: Mingjia Yao
Date: May, 2020
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import copy
import math
class ValueIteration(object):
def __init__(self, transitionTable, rewardTable, valueTable, convergenceTolerance, gamma):
self.transitionTable = transitionTable
self.rewardTable = rewardTable
self.valueTable = valueTable
self.convergenceTolerance = convergenceTolerance
self.gamma = gamma
def __call__(self):
ValueTable=self.valueTable
tempValueTable=copy.copy(ValueTable)
delta=1
while delta >= self.convergenceTolerance:
delta=0
for s in ValueTable:
v=ValueTable[s]
lib_s=self.transitionTable[s]
                max_a = float('-inf')  # allow negative action values
for a in lib_s:
sum_sr=0
lib_a=lib_s[a]
for ns in lib_a:
sum_sr=sum_sr+lib_a[ns]*(self.rewardTable[s][a][ns]+self.gamma*tempValueTable[ns])
max_a=max(max_a,sum_sr)
ValueTable[s]=max_a
delta=max(delta,abs(v-ValueTable[s]))
tempValueTable=copy.copy(ValueTable)
policyTable={}
for s in ValueTable:
lib_s=self.transitionTable[s]
            pi_s = (0, 0)
            vs = float('-inf')  # allow negative action values
            prob_ns = 0
            for a in lib_s:
                sum_sr = 0
                lib_a = lib_s[a]
                for ns in lib_a:
                    sum_sr = sum_sr + lib_a[ns]*(self.rewardTable[s][a][ns]+self.gamma*ValueTable[ns])
                if sum_sr > vs:
                    pi_s = a
                    vs = sum_sr
            # read the successor probability from the chosen action's
            # transitions, not from the last action iterated above
            lib_a = lib_s[pi_s]
            pi_ns = (0, 0)
            v_ns = float('-inf')
            for ns in lib_a:
                if ValueTable[ns] > v_ns:
                    pi_ns = ns
                    v_ns = ValueTable[ns]
                    prob_ns = lib_a[ns]
            policyTable[s] = {pi_s: prob_ns}
return ([ValueTable, policyTable])
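# A minimal usage sketch, with the tables built in main() below:
#   solver = ValueIteration(transition, rewardA, valueTable, convergenceTolerance, gamma)
#   values, policy = solver()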
def visualizeValueTable(gridWidth, gridHeight, goalState, trapStates, valueTable):
    gridAdjust = .5
gridScale = 1.5
xs = np.linspace(-gridAdjust, gridWidth-gridAdjust, gridWidth+1)
ys = np.linspace(-gridAdjust, gridHeight-gridAdjust, gridHeight+1)
plt.rcParams["figure.figsize"] = [gridWidth*gridScale,gridHeight*gridScale]
ax = plt.gca(frameon=False, xticks = range(gridWidth), yticks = range(gridHeight))
#goal and trap coloring
ax.add_patch(Rectangle((goalState[0]-gridAdjust, goalState[1]-gridAdjust), 1, 1, fill=True, color='green', alpha=.1))
for (trapx, trapy) in trapStates:
ax.add_patch(Rectangle((trapx-gridAdjust, trapy-gridAdjust), 1, 1, fill=True, color='black', alpha=.1))
# grid lines
for x in xs:
plt.plot([x, x], [ys[0], ys[-1]], color = "black")
for y in ys:
plt.plot([xs[0], xs[-1]], [y, y], color = "black")
#labeled values
for (statex, statey), val in valueTable.items():
plt.text(statex-.2, statey, str(round(val, 3)))
plt.show()
def visualizePolicy(gridWidth, gridHeight, goalState, trapStates, policy):
#grid height/width
    gridAdjust = .5
    gridScale = 1.5
    arrowScale = .5
xs = np.linspace(-gridAdjust, gridWidth-gridAdjust, gridWidth+1)
ys = np.linspace(-gridAdjust, gridHeight-gridAdjust, gridHeight+1)
plt.rcParams["figure.figsize"] = [gridWidth*gridScale,gridHeight*gridScale]
ax = plt.gca(frameon=False, xticks = range(gridWidth), yticks = range(gridHeight))
#goal and trap coloring
ax.add_patch(Rectangle((goalState[0]-gridAdjust, goalState[1]-gridAdjust), 1, 1, fill=True, color='green', alpha=.1))
for (trapx, trapy) in trapStates:
ax.add_patch(Rectangle((trapx-gridAdjust, trapy-gridAdjust), 1, 1, fill=True, color='black', alpha=.1))
# grid lines
for x in xs:
plt.plot([x, x], [ys[0], ys[-1]], color = "black")
for y in ys:
plt.plot([xs[0], xs[-1]], [y, y], color = "black")
#labeled values
for (statex, statey), actionDict in policy.items():
for (optimalActionX, optimalActionY), actionProb in actionDict.items():
plt.arrow(statex, statey, optimalActionX*actionProb*arrowScale, optimalActionY*actionProb*arrowScale, head_width=0.05*actionProb, head_length=0.1*actionProb)
plt.show()
def viewDictionaryStructure(d, levels, indent=0):
for key, value in d.items():
print('\t' * indent + str(levels[indent]) + ": "+ str(key))
if isinstance(value, dict):
viewDictionaryStructure(value, levels, indent+1)
else:
print('\t' * (indent+1) + str(levels[indent+1])+ ": " + str(value))
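# Illustrative usage: label the nesting levels of a transition table
#   viewDictionaryStructure(transition, ["state", "action", "next state", "probability"])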
def p_traj(traj,beta,gamma,transitTable,rewardTable,valueTable):
p=1
ret=[]
for i in range(len(traj)-1):
p=p*p_stn_st(traj[i],traj[i+1],beta,gamma,transitTable,rewardTable,valueTable)
ret=ret+[p]
return ret
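# Illustrative usage (convergedValues stands for the value table returned by ValueIteration):
# the i-th entry of the result is the likelihood of the first i+1 states, so the last
# entry is the likelihood of the whole trajectory:
#   p_traj(trajectoryToGoalA, beta, gamma, transition, rewardA, convergedValues)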
def p_stn_st(st,stn,beta,gamma,transitTable,rewardTable,valueTable):
lib_s=transitTable[st]
p=0
for at in lib_s:
if stn in lib_s[at]:
p=p+lib_s[at][stn]*pias(st,at,beta,gamma,transitTable,rewardTable,valueTable)
return p
def pias(st,at,beta,gamma,transitTable,rewardTable,valueTable):
    # Boltzmann (softmax) action probability: pi(a|s) proportional to exp(beta*Q(s, a)).
    # The original math.log(beta*q(...)) raises ValueError for the negative Q-values this
    # grid produces; a softmax policy, the standard choice for goal inference, is assumed here.
    qs = {a: q(st, a, gamma, transitTable, rewardTable, valueTable) for a in transitTable[st]}
    denom = sum(math.exp(beta * v) for v in qs.values())
    return math.exp(beta * qs[at]) / denom
def q(st,at,gamma,transitTable,rewardTable,valueTable):
lib_sa=transitTable[st][at]
q=0
for ns in lib_sa:
q=q+lib_sa[ns]*(rewardTable[st][at][ns]+gamma*valueTable[ns])
return q
def main():
gamma =.95
beta =.4
convergenceTolerance = 10e-7
transition = {(0, 0): {(1, 0): {(1, 0): 1},(0, 1): {(0, 1): 1},(-1, 0): {(0, 0): 1},(0, -1): {(0, 0): 1},(-1, 1): {(0, 0): 1},(1, -1): {(0, 0): 1},(1, 1): {(1, 1): 1},(-1, -1): {(0, 0): 1}},(0, 1): {(1, 0): {(1, 1): 1},(0, 1): {(0, 2): 1},(-1, 0): {(0, 1): 1},(0, -1): {(0, 0): 1},(-1, 1): {(0, 1): 1},(1, -1): {(1, 0): 1},(1, 1): {(1, 2): 1},(-1, -1): {(0, 1): 1}},(0, 2): {(1, 0): {(1, 2): 1},(0, 1): {(0, 3): 1},(-1, 0): {(0, 2): 1},(0, -1): {(0, 1): 1},(-1, 1): {(0, 2): 1},(1, -1): {(1, 1): 1},(1, 1): {(1, 3): 1},(-1, -1): {(0, 2): 1}},(0, 3): {(1, 0): {(1, 3): 1},(0, 1): {(0, 4): 1},(-1, 0): {(0, 3): 1},(0, -1): {(0, 2): 1},(-1, 1): {(0, 3): 1},(1, -1): {(1, 2): 1},(1, 1): {(1, 4): 1},(-1, -1): {(0, 3): 1}},(0, 4): {(1, 0): {(1, 4): 1},(0, 1): {(0, 5): 1},(-1, 0): {(0, 4): 1},(0, -1): {(0, 3): 1},(-1, 1): {(0, 4): 1},(1, -1): {(1, 3): 1},(1, 1): {(1, 5): 1},(-1, -1): {(0, 4): 1}},(0, 5): {(1, 0): {(1, 5): 1},(0, 1): {(0, 5): 1},(-1, 0): {(0, 5): 1},(0, -1): {(0, 4): 1},(-1, 1): {(0, 5): 1},(1, -1): {(1, 4): 1},(1, 1): {(0, 5): 1},(-1, -1): {(0, 5): 1}},(1, 0): {(1, 0): {(2, 0): 1},(0, 1): {(1, 1): 1},(-1, 0): {(0, 0): 1},(0, -1): {(1, 0): 1},(-1, 1): {(0, 1): 1},(1, -1): {(1, 0): 1},(1, 1): {(2, 1): 1},(-1, -1): {(1, 0): 1}},(1, 1): {(1, 0): {(2, 1): 1},(0, 1): {(1, 2): 1},(-1, 0): {(0, 1): 1},(0, -1): {(1, 0): 1},(-1, 1): {(0, 2): 1},(1, -1): {(2, 0): 1},(1, 1): {(2, 2): 1},(-1, -1): {(0, 0): 1}},(1, 2): {(1, 0): {(2, 2): 1},(0, 1): {(1, 3): 1},(-1, 0): {(0, 2): 1},(0, -1): {(1, 1): 1},(-1, 1): {(0, 3): 1},(1, -1): {(2, 1): 1},(1, 1): {(2, 3): 1},(-1, -1): {(0, 1): 1}},(1, 3): {(1, 0): {(2, 3): 1},(0, 1): {(1, 4): 1},(-1, 0): {(0, 3): 1},(0, -1): {(1, 2): 1},(-1, 1): {(0, 4): 1},(1, -1): {(2, 2): 1},(1, 1): {(2, 4): 1},(-1, -1): {(0, 2): 1}},(1, 4): {(1, 0): {(2, 4): 1},(0, 1): {(1, 5): 1},(-1, 0): {(0, 4): 1},(0, -1): {(1, 3): 1},(-1, 1): {(0, 5): 1},(1, -1): {(2, 3): 1},(1, 1): {(2, 5): 1},(-1, -1): {(0, 3): 1}},(1, 5): {(1, 0): {(2, 5): 1},(0, 1): {(1, 5): 1},(-1, 0): {(0, 5): 1},(0, -1): {(1, 4): 1},(-1, 1): {(1, 5): 1},(1, -1): {(2, 4): 1},(1, 1): {(1, 5): 1},(-1, -1): {(0, 4): 1}},(2, 0): {(1, 0): {(3, 0): 1},(0, 1): {(2, 1): 1},(-1, 0): {(1, 0): 1},(0, -1): {(2, 0): 1},(-1, 1): {(1, 1): 1},(1, -1): {(2, 0): 1},(1, 1): {(3, 1): 1},(-1, -1): {(2, 0): 1}},(2, 1): {(1, 0): {(3, 1): 1},(0, 1): {(2, 2): 1},(-1, 0): {(1, 1): 1},(0, -1): {(2, 0): 1},(-1, 1): {(1, 2): 1},(1, -1): {(3, 0): 1},(1, 1): {(3, 2): 1},(-1, -1): {(1, 0): 1}},(2, 2): {(1, 0): {(3, 2): 1},(0, 1): {(2, 3): 1},(-1, 0): {(1, 2): 1},(0, -1): {(2, 1): 1},(-1, 1): {(1, 3): 1},(1, -1): {(3, 1): 1},(1, 1): {(3, 3): 1},(-1, -1): {(1, 1): 1}},(2, 3): {(1, 0): {(3, 3): 1},(0, 1): {(2, 4): 1},(-1, 0): {(1, 3): 1},(0, -1): {(2, 2): 1},(-1, 1): {(1, 4): 1},(1, -1): {(3, 2): 1},(1, 1): {(3, 4): 1},(-1, -1): {(1, 2): 1}},(2, 4): {(1, 0): {(3, 4): 1},(0, 1): {(2, 5): 1},(-1, 0): {(1, 4): 1},(0, -1): {(2, 3): 1},(-1, 1): {(1, 5): 1},(1, -1): {(3, 3): 1},(1, 1): {(3, 5): 1},(-1, -1): {(1, 3): 1}},(2, 5): {(1, 0): {(3, 5): 1},(0, 1): {(2, 5): 1},(-1, 0): {(1, 5): 1},(0, -1): {(2, 4): 1},(-1, 1): {(2, 5): 1},(1, -1): {(3, 4): 1},(1, 1): {(2, 5): 1},(-1, -1): {(1, 4): 1}},(3, 0): {(1, 0): {(4, 0): 1},(0, 1): {(3, 1): 1},(-1, 0): {(2, 0): 1},(0, -1): {(3, 0): 1},(-1, 1): {(2, 1): 1},(1, -1): {(3, 0): 1},(1, 1): {(4, 1): 1},(-1, -1): {(3, 0): 1}},(3, 1): {(1, 0): {(4, 1): 1},(0, 1): {(3, 2): 1},(-1, 0): {(2, 1): 1},(0, -1): {(3, 0): 1},(-1, 1): {(2, 2): 1},(1, -1): {(4, 0): 1},(1, 1): {(4, 2): 1},(-1, -1): {(2, 0): 1}},(3, 2): {(1, 0): 
{(4, 2): 1},(0, 1): {(3, 3): 1},(-1, 0): {(2, 2): 1},(0, -1): {(3, 1): 1},(-1, 1): {(2, 3): 1},(1, -1): {(4, 1): 1},(1, 1): {(4, 3): 1},(-1, -1): {(2, 1): 1}},(3, 3): {(1, 0): {(4, 3): 1},(0, 1): {(3, 4): 1},(-1, 0): {(2, 3): 1},(0, -1): {(3, 2): 1},(-1, 1): {(2, 4): 1},(1, -1): {(4, 2): 1},(1, 1): {(4, 4): 1},(-1, -1): {(2, 2): 1}},(3, 4): {(1, 0): {(4, 4): 1},(0, 1): {(3, 5): 1},(-1, 0): {(2, 4): 1},(0, -1): {(3, 3): 1},(-1, 1): {(2, 5): 1},(1, -1): {(4, 3): 1},(1, 1): {(4, 5): 1},(-1, -1): {(2, 3): 1}},(3, 5): {(1, 0): {(4, 5): 1},(0, 1): {(3, 5): 1},(-1, 0): {(2, 5): 1},(0, -1): {(3, 4): 1},(-1, 1): {(3, 5): 1},(1, -1): {(4, 4): 1},(1, 1): {(3, 5): 1},(-1, -1): {(2, 4): 1}},(4, 0): {(1, 0): {(5, 0): 1},(0, 1): {(4, 1): 1},(-1, 0): {(3, 0): 1},(0, -1): {(4, 0): 1},(-1, 1): {(3, 1): 1},(1, -1): {(4, 0): 1},(1, 1): {(5, 1): 1},(-1, -1): {(4, 0): 1}},(4, 1): {(1, 0): {(5, 1): 1},(0, 1): {(4, 2): 1},(-1, 0): {(3, 1): 1},(0, -1): {(4, 0): 1},(-1, 1): {(3, 2): 1},(1, -1): {(5, 0): 1},(1, 1): {(5, 2): 1},(-1, -1): {(3, 0): 1}},(4, 2): {(1, 0): {(5, 2): 1},(0, 1): {(4, 3): 1},(-1, 0): {(3, 2): 1},(0, -1): {(4, 1): 1},(-1, 1): {(3, 3): 1},(1, -1): {(5, 1): 1},(1, 1): {(5, 3): 1},(-1, -1): {(3, 1): 1}},(4, 3): {(1, 0): {(5, 3): 1},(0, 1): {(4, 4): 1},(-1, 0): {(3, 3): 1},(0, -1): {(4, 2): 1},(-1, 1): {(3, 4): 1},(1, -1): {(5, 2): 1},(1, 1): {(5, 4): 1},(-1, -1): {(3, 2): 1}},(4, 4): {(1, 0): {(5, 4): 1},(0, 1): {(4, 5): 1},(-1, 0): {(3, 4): 1},(0, -1): {(4, 3): 1},(-1, 1): {(3, 5): 1},(1, -1): {(5, 3): 1},(1, 1): {(5, 5): 1},(-1, -1): {(3, 3): 1}},(4, 5): {(1, 0): {(5, 5): 1},(0, 1): {(4, 5): 1},(-1, 0): {(3, 5): 1},(0, -1): {(4, 4): 1},(-1, 1): {(4, 5): 1},(1, -1): {(5, 4): 1},(1, 1): {(4, 5): 1},(-1, -1): {(3, 4): 1}},(5, 0): {(1, 0): {(6, 0): 1},(0, 1): {(5, 1): 1},(-1, 0): {(4, 0): 1},(0, -1): {(5, 0): 1},(-1, 1): {(4, 1): 1},(1, -1): {(5, 0): 1},(1, 1): {(6, 1): 1},(-1, -1): {(5, 0): 1}},(5, 1): {(1, 0): {(6, 1): 1},(0, 1): {(5, 2): 1},(-1, 0): {(4, 1): 1},(0, -1): {(5, 0): 1},(-1, 1): {(4, 2): 1},(1, -1): {(6, 0): 1},(1, 1): {(6, 2): 1},(-1, -1): {(4, 0): 1}},(5, 2): {(1, 0): {(6, 2): 1},(0, 1): {(5, 3): 1},(-1, 0): {(4, 2): 1},(0, -1): {(5, 1): 1},(-1, 1): {(4, 3): 1},(1, -1): {(6, 1): 1},(1, 1): {(6, 3): 1},(-1, -1): {(4, 1): 1}},(5, 3): {(1, 0): {(6, 3): 1},(0, 1): {(5, 4): 1},(-1, 0): {(4, 3): 1},(0, -1): {(5, 2): 1},(-1, 1): {(4, 4): 1},(1, -1): {(6, 2): 1},(1, 1): {(6, 4): 1},(-1, -1): {(4, 2): 1}},(5, 4): {(1, 0): {(6, 4): 1},(0, 1): {(5, 5): 1},(-1, 0): {(4, 4): 1},(0, -1): {(5, 3): 1},(-1, 1): {(4, 5): 1},(1, -1): {(6, 3): 1},(1, 1): {(6, 5): 1},(-1, -1): {(4, 3): 1}},(5, 5): {(1, 0): {(6, 5): 1},(0, 1): {(5, 5): 1},(-1, 0): {(4, 5): 1},(0, -1): {(5, 4): 1},(-1, 1): {(5, 5): 1},(1, -1): {(6, 4): 1},(1, 1): {(5, 5): 1},(-1, -1): {(4, 4): 1}},(6, 0): {(1, 0): {(6, 0): 1},(0, 1): {(6, 1): 1},(-1, 0): {(5, 0): 1},(0, -1): {(6, 0): 1},(-1, 1): {(5, 1): 1},(1, -1): {(6, 0): 1},(1, 1): {(6, 0): 1},(-1, -1): {(6, 0): 1}},(6, 1): {(1, 0): {(6, 1): 1},(0, 1): {(6, 2): 1},(-1, 0): {(5, 1): 1},(0, -1): {(6, 0): 1},(-1, 1): {(5, 2): 1},(1, -1): {(6, 1): 1},(1, 1): {(6, 1): 1},(-1, -1): {(5, 0): 1}},(6, 2): {(1, 0): {(6, 2): 1},(0, 1): {(6, 3): 1},(-1, 0): {(5, 2): 1},(0, -1): {(6, 1): 1},(-1, 1): {(5, 3): 1},(1, -1): {(6, 2): 1},(1, 1): {(6, 2): 1},(-1, -1): {(5, 1): 1}},(6, 3): {(1, 0): {(6, 3): 1},(0, 1): {(6, 4): 1},(-1, 0): {(5, 3): 1},(0, -1): {(6, 2): 1},(-1, 1): {(5, 4): 1},(1, -1): {(6, 3): 1},(1, 1): {(6, 3): 1},(-1, -1): {(5, 2): 1}},(6, 4): {(1, 0): {(6, 4): 1},(0, 1): {(6, 5): 
1},(-1, 0): {(5, 4): 1},(0, -1): {(6, 3): 1},(-1, 1): {(5, 5): 1},(1, -1): {(6, 4): 1},(1, 1): {(6, 4): 1},(-1, -1): {(5, 3): 1}},(6, 5): {(1, 0): {(6, 5): 1},(0, 1): {(6, 5): 1},(-1, 0): {(5, 5): 1},(0, -1): {(6, 4): 1},(-1, 1): {(6, 5): 1},(1, -1): {(6, 5): 1},(1, 1): {(6, 5): 1},(-1, -1): {(5, 4): 1}}}
valueTable = {(0, 0): 0,(0, 1): 0,(0, 2): 0,(0, 3): 0,(0, 4): 0,(0, 5): 0,(1, 0): 0,(1, 1): 0,(1, 2): 0,(1, 3): 0,(1, 4): 0,(1, 5): 0,(2, 0): 0,(2, 1): 0,(2, 2): 0,(2, 3): 0,(2, 4): 0,(2, 5): 0,(3, 0): 0,(3, 1): 0,(3, 2): 0,(3, 3): 0,(3, 4): 0,(3, 5): 0,(4, 0): 0,(4, 1): 0,(4, 2): 0,(4, 3): 0,(4, 4): 0,(4, 5): 0,(5, 0): 0,(5, 1): 0,(5, 2): 0,(5, 3): 0,(5, 4): 0,(5, 5): 0,(6, 0): 0,(6, 1): 0,(6, 2): 0,(6, 3): 0,(6, 4): 0,(6, 5): 0}
#Observed Trajectories
trajectoryToGoalA = [(0,0), (1,1), (1,2), (2,3), (3,4), (4,4), (5,4), (6,4)]
trajectoryToGoalB = [(0,0), (1,1), (2,2), (2,3), (3,4), (4,3), (5,2), (6,1)]
trajectoryToGoalC = [(0,0), (0,1), (1,2), (1,3), (1,4), (1,5)]
#Environment 1: Solid Barrier
rewardA = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): -1.0},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): -1.0},(0, -1): {(1, 4): -1.0},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): -1.4142135623730951},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): -1.4142135623730951}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 
3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1
# coding: utf-8
import gc
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
import time
from datetime import datetime, timedelta
from collections import deque, defaultdict
from itertools import islice
from tqdm import tqdm as tqdm
from contextlib import contextmanager
@contextmanager
def faith(title):
start_time = time.time()
yield
print(">> {} - done in {:.0f}s".format(title, time.time() - start_time))
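> {}">
# Illustrative usage: time any block of work, e.g.
#   with faith('loading data'):
#       df = pd.read_csv('train.csv')  # hypothetical file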
START_DATE = datetime.strptime('2017-12-01', '%Y-%m-%d') #or 30th Nov?
#START_DATE = datetime.strptime('2017-11-01', '%Y-%m-%d')
periods = ['7d', '14d']
min_instances = 1000
aggr_cols = [
'addr1','card1','card2','card3','card4','card5','card6','ProductCD',
'pemail_domain','remail_domain','pemail_ext','remail_ext',
]
country_map = {
'com':'us', 'net':'us', 'edu':'us', 'gmail':'us','mx':'mx', 'es':'es', 'de':'de', 'fr':'fr','uk':'uk', 'jp':'jp'
}
domain = lambda x: x.split('.')[0]
pemail_country = lambda x: x.split('.')[-1]
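# e.g. domain('gmail.com') -> 'gmail' and pemail_country('gmail.com') -> 'com'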
USER_AGENTS = [
'Intel', 'Windows NT 6.1', 'Windows NT 6.2', 'Microsoft', 'Trident/7.0',
'Touch', 'S.N.O.W.4', 'BOIE9', 'rv:11.0', 'rv:48.0', 'rv:52.0', 'rv:56.0',
'rv:57.0', 'rv:58.0', 'rv:59.0', 'rv:60.0', 'rv:61.0', 'rv:62.0', 'rv:63.0',
'rv:64.0', 'rv:38.0', 'rv:51.0', 'rv:45.0', 'rv:42.0', 'rv:49.0', 'en-us',
'rv:41.0', 'rv:54.0', 'rv:47.0', 'rv:55.0', 'rv:31.0', 'rv:44.0', 'rv:53.0',
'rv:39.0', 'rv:35.0', 'rv:50.0', 'rv:37.0', 'rv:52.9', 'rv:46.0', 'rv:43.0',
'rv:29.0', 'rv:14.0', 'rv:33.0', 'rv:21.0', 'rv:27.0', 'rv:65.0', 'rv:28.0',
'rv:60.1.0', 'es-us', 'es-es', 'es-mx', 'en-gb', 'Linux', 'MDDRJS',
'Android 5.1', 'Android 4.4.2', 'Android 6.0.1', 'Android 6.0', 'Android 7.0',
'Android', 'Android 8.0.0', 'Android 7.1.2', 'WOW64', 'ATT-IE11', 'MAMI', 'MALC',
'hp2015', 'Northwell', 'xs-Z47b7VqTMxs', 'QwestIE8', 'ATT', 'NetHelper70',
'FunWebProducts', 'Lifesize', 'CPU'
]
CAT_FCOLS = ['card2', 'card3', 'card5', 'addr1', 'addr2', 'dist1', 'dist2']
C_FCOLS = [f'C{i}' for i in range(1, 15)]
D_FCOLS = [f'D{i}' for i in range(1, 16)]
V_FCOLS = [f'V{i}' for i in range(1, 340)]
FLOAT64_TCOLS = CAT_FCOLS + C_FCOLS + D_FCOLS + V_FCOLS
FLOAT64_ICOLS = [f'id_0{i}' for i in range(1, 10)] + ['id_10', 'id_11', 'id_13', 'id_14', 'id_17', 'id_18', 'id_19', 'id_20', 'id_21', 'id_22', 'id_24', 'id_25', 'id_26', 'id_32']
id_30_dates = {
'Android 4.4.2':'2012-11-13','Android 5.0':'2014-11-12','Android 5.0.2':'2014-12-19',
'Android 5.1.1':'2015-04-21','Android 6.0':'2015-10-05','Android 6.0.1':'2015-12-07',
'Android 7.0':'2016-08-22','Android 7.1.1':'2016-12-05','Android 7.1.2':'2017-04-04',
'Android 8.0.0':'2017-08-21','Android 8.1.0':'2017-12-05','Android 9':'2018-08-06',
#Windows
'Windows XP':'2001-10-25','Windows Vista':'2006-11-08','Windows 7':'2009-10-22',
'Windows 8':'2012-10-26','Windows 8.1':'2013-10-17','Windows 10':'2015-07-29',
#MacOS
'Mac OS X 10.6': '2009-08-28','Mac OS X 10_6_8': '2011-06-23','Mac OS X 10_7_5': '2012-09-19',
'Mac OS X 10_8_5': '2013-09-12','Mac OS X 10.9': '2013-10-22','Mac OS X 10_9_5': '2014-09-17',
'Mac OS X 10.10': '2014-10-16','Mac OS X 10_10_5': '2015-08-13','Mac OS X 10.11': '2015-09-30',
'Mac OS X 10_11_3': '2016-01-19','Mac OS X 10_11_4': '2016-03-20','Mac OS X 10_11_5': '2016-05-16',
'Mac OS X 10_11_6': '2016-07-18','Mac OS X 10.12': '2016-09-20','Mac OS X 10_12': '2016-09-20',
'Mac OS X 10_12_1': '2016-10-24','Mac OS X 10_12_2': '2016-12-13','Mac OS X 10_12_3': '2017-01-23',
'Mac OS X 10_12_4': '2017-03-27','Mac OS X 10_12_5': '2017-05-15','Mac OS X 10_12_6': '2017-07-19',
'Mac OS X 10.13': '2017-09-25','Mac OS X 10_13_1': '2017-10-31','Mac OS X 10_13_2': '2017-12-06',
'Mac OS X 10_13_3': '2018-01-23','Mac OS X 10_13_4': '2018-03-29','Mac OS X 10_13_5': '2018-06-01',
'Mac OS X 10_13_6': '2018-07-09','Mac OS X 10.14': '2018-09-24','Mac OS X 10_14': '2018-09-24',
'Mac OS X 10_14_0': '2018-09-24','Mac OS X 10_14_1': '2018-10-30','Mac OS X 10_14_2': '2018-12-05',
#iOS
'iOS 9.3.5':'2016-08-25','iOS 10.0.2':'2016-09-23','iOS 10.1.1':'2016-10-31','iOS 10.2.0':'2016-12-12',
'iOS 10.2.1':'2017-01-23','iOS 10.3.1':'2017-04-03','iOS 10.3.2':'2017-05-15','iOS 10.3.3':'2017-07-19',
'iOS 11.0.0':'2017-08-19','iOS 11.0.1':'2017-08-26','iOS 11.0.2':'2017-10-03','iOS 11.0.3':'2017-10-11',
'iOS 11.1.0':'2017-10-31','iOS 11.1.1':'2017-11-08','iOS 11.1.2':'2017-11-16','iOS 11.2.0':'2017-12-02',
'iOS 11.2.1':'2017-12-13','iOS 11.2.2':'2018-01-08','iOS 11.2.5':'2018-01-23','iOS 11.2.6':'2018-02-19',
'iOS 11.3.0':'2018-03-29','iOS 11.3.1':'2018-04-24','iOS 11.4.0':'2018-05-29','iOS 11.4.1':'2018-07-09',
'iOS 12.0.0':'2018-08-17','iOS 12.0.1':'2018-09-08','iOS 12.1.0':'2018-09-30','iOS 12.1.1':'2018-12-05',
'iOS 12.1.2':'2018-12-20',
}
id_30_dates = {k.lower():v for k,v in id_30_dates.items()}
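# e.g. id_30_dates['windows 10'] -> '2015-07-29' once the keys are lowercased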
with faith('1. Loading Data Hold On....'):
df_train_identity = pd.read_csv('../input/train_identity.csv', dtype=dict.fromkeys(FLOAT64_ICOLS, np.float32),)
df_test_identity = pd.read_csv('../input/test_identity.csv', dtype=dict.fromkeys(FLOAT64_ICOLS, np.float32),)
df_train_transaction = pd.read_csv('../input/train_transaction.csv', dtype=dict.fromkeys(FLOAT64_TCOLS, np.float32),)
df_test_transaction = pd.read_csv('../input/test_transaction.csv', dtype=dict.fromkeys(FLOAT64_TCOLS, np.float32),)
X_train = pd.merge(df_train_transaction, df_train_identity, how='left', on='TransactionID')
X_test = pd.merge(df_test_transaction, df_test_identity, how='left', on='TransactionID')
org_cols = X_train.columns.tolist()
    print('Number of Training Examples = {}'.format(df_train_transaction.shape[0]))
    print('Number of Test Examples = {}\n'.format(df_test_transaction.shape[0]))
    print('Number of Training Examples with Identity = {}'.format(df_train_identity.shape[0]))
    print('Number of Test Examples with Identity = {}\n'.format(df_test_identity.shape[0]))
    print('Training X Shape = {}'.format(X_train.shape))
    print('Training y Shape = {}'.format(X_train['isFraud'].shape))
    print('Test X Shape = {}\n'.format(X_test.shape))
    print('Training Set Memory Usage = {:.2f} MB'.format(X_train.memory_usage().sum() / 1024**2))
    print('Test Set Memory Usage = {:.2f} MB\n'.format(X_test.memory_usage().sum() / 1024**2))
del df_train_identity, df_test_identity, df_train_transaction, df_test_transaction
with faith('2. Adding simple time feats like minute, hour etc (will be dropped later)'):
for df in tqdm([X_train, X_test]):
# TransactionDT converted to a timestamp
df['TransactionDate'] = (df['TransactionDT'] - 86400).apply(lambda x: (START_DATE + timedelta(seconds=x)))
# Time features for aggregation and grouping
df['Minute'] = df['TransactionDate'].dt.minute.values
df['Hour'] = df['TransactionDate'].dt.hour.values
df['Day'] = df['TransactionDate'].dt.day.values
df['DayOfWeek'] = df['TransactionDate'].dt.dayofweek.values
df['DayOfYear'] = df['TransactionDate'].dt.dayofyear.values
df['Week'] = df['TransactionDate'].dt.week.values
df['Month'] = df['TransactionDate'].dt.month.values
# D9 is Hour divided by 24, so this will fill the NaNs of D9
df['D9'] = df['Hour'] / 24
with faith('3. Fixing id_30 and DeviceInfo and inferring more vals for other cols etc...'):
for df in tqdm([X_train, X_test]):
########## DeviceInfo ##########
# Finding DeviceInfo from id_31
df.loc[df.query('DeviceInfo.isnull() and id_31.str.contains("mobile safari")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31.str.contains("for ios")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31.str.startswith("google search application")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "google"', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari generic"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 9.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 10.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 11.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 12.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
########## DeviceType ##########
# Finding DeviceType from DeviceInfo
df.loc[df.query('DeviceType.isnull() and id_31 == "ie 11.0 for desktop"', engine='python').index, 'DeviceType'] = 'desktop'
df.loc[df.query('DeviceType.isnull() and id_31 == "chrome 65.0"', engine='python').index, 'DeviceType'] = 'desktop'
df.loc[df.query('DeviceType.isnull() and id_31 == "ie 11.0 for tablet"', engine='python').index, 'DeviceType'] = 'desktop'
# Finding DeviceType from id_31
df.loc[df.query('DeviceType.isnull() and ~DeviceInfo.isnull()', engine='python').index, 'DeviceType'] = 'desktop'
########## id_30 ##########
# Finding id_30 from DeviceInfo parsing errors
df.loc[df.query('id_30.isnull() and DeviceInfo == "Linux x86_64"', engine='python').index, 'id_30'] = 'Linux'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Linux i686"', engine='python').index, 'id_30'] = 'Linux'
df.loc[df.query('id_30.isnull() and DeviceInfo == "BOIE9"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "MDDRJS"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows NT 6.1"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows NT 6.2"', engine='python').index, 'id_30'] = 'Windows 8'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Microsoft"', engine='python').index, 'id_30'] = 'Windows 10'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows" and id_31.str.startswith("edge")', engine='python').index, 'id_30'] = 'Windows 10'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 5.1"', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 4.4.2"', engine='python').index, 'id_30'] = 'Android 4.4.2'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 5.1.1"', engine='python').index, 'id_30'] = 'Android 5.1.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 6.0.1"', engine='python').index, 'id_30'] = 'Android 6.0.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 6.0"', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 7.0"', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android"', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 7.1.2"', engine='python').index, 'id_30'] = 'Android 7.1.2'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 8.0.0"', engine='python').index, 'id_30'] = 'Android 8.0.0'
# Finding id_30 from id_31 parsing errors
df.loc[df.query('id_30.isnull() and id_31 == "Generic/Android 7.0"', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and id_31.str.startswith("edge")', engine='python').index, 'id_30'] = 'Windows 10'
# Finding id_30 from Android Build Numbers
# Android devices without Build Numbers are labeled as Android
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Huawei")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/HUAWEI")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/S100")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Vision")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/HONOR")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Lenovo")', engine='python').index, 'id_30'] = 'Android'
# Android devices with Build Numbers are mapped with their correct id_30 values
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("IML74K")', engine='python').index, 'id_30'] = 'Android 4.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JZO54K")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("6.2.A.1.100")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("9.8.2I-50_SML-25")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JDQ39")', engine='python').index, 'id_30'] = 'Android 4.2'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JLS36C")', engine='python').index, 'id_30'] = 'Android 4.3'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KTU84M")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KTU84P")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KOT49H")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KOT49I")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KVT49L")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXB21.14-L1.40")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXB20.9-1.10-1.24-1.1")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXC21.5-40")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("24.0.A.5.14")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("SU6-7.7")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("26.1.A.3.111")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.1.A.1.81")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21R")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX22C")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21V")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21M")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21Y")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21T")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX22G")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPBS23.13-57-2")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPCS23.13-56-5")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPBS23.13-56-2")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPAS23.12-21.7-1")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPCS23.13-34.8-3")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("E050L")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("L050U")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY48B")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47D")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47I")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47V")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LVY48F")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47O")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47X")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("10.7.A.0.222")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("14.6.A.0.368")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("14.6.A.1.236")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("18.6.A.0.182")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("19.4.A.0.182")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("29.1.A.0.101")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-15.3-7")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.65-33-1-30")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("26.3.A.1.33")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.165")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.129")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.173")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("29.2.A.0.166")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("36.0.A.2.146")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.2.A.3.81")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.2.A.4.70")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("23.5.A.1.291")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("37.0.A.2.108")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("37.0.A.2.248")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("30.2.A.1.21")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("35.0.D.2.25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("M4B30Z")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPB24.65-34-3")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPI24.65-25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.107-52-11")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-33.1-2-10")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-33.1-2-16")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-25.1-19")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.107-55-2-17")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-2.35-1-17")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-15.3-26")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPI24.65-33.1-2")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MCG24.251-5-5")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPB24.65-34")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.107-52")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.65-25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.107-52-5")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.65-33-1-3")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("IEXCNFN5902303111S")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.65-33")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MHC19Q")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB28B")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MOB30M")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29K")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MRA58K")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29M")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29T")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MXB48T")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90M")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90N")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90U")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.3.A.1.97")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.3.A.1.115")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.2.A.2.47")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("36.1.A.1.86")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.175")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.135")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.189")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("42.0.A.4.101")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("42.0.A.4.167")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.5.79")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.25")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.70")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.55")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("HONORBLN-L24")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("HONORDLI-L22")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPHS25.200-15-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPHS25.200-23-1")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-4")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-15-11")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-93-14")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-82")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-72")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-15-11")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-33")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-72")', engine='python').index,
# -*- coding: utf8 -*-
# python
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
bl_info = {
'name': 'Texture Paint plus',
'author': 'Bart Crouch, scorpion81, Spirou4D, CDMJ',
'version': (2, 20),
'blender': (2, 73, 0),
'location': 'Paint editor > 3D view',
'warning': '',
'description': 'Several improvements for Texture Paint mode',
'wiki_url': '',
'tracker_url': '',
'category': 'Paint'}
import bgl
import blf
import bpy
import mathutils
import os
import time
import copy
import math
from bpy_extras.io_utils import ImportHelper
from bpy.types import Operator, Menu, Panel, UIList
#################################################
# #
# Functions for #
# '''Sync selection from uv-editor to 3d-view'''#
#################################################
# draw in 3d-view =>OK!
def draw_callback(self, context):
r, g, b = context.tool_settings.image_paint.brush.cursor_color_add
#x0, y0, x1, y1 = context.window_manager["straight_line"]
start = self.stroke[0]
end = self.stroke[-1]
x0 = start["mouse"][0]
y0 = start["mouse"][1]
x1 = end["mouse"][0]
y1 = end["mouse"][1]
# draw straight line
bgl.glEnable(bgl.GL_BLEND)
bgl.glColor4f(r, g, b, 1.0)
bgl.glBegin(bgl.GL_LINE_STRIP)
bgl.glVertex2i(x0, y0)
bgl.glVertex2i(x1, y1)
bgl.glEnd()
# restore opengl defaults
bgl.glDisable(bgl.GL_BLEND)
bgl.glColor4f(0.0, 0.0, 0.0, 1.0)
# return a list of all images that are being displayed in an editor =>OK!
def get_images_in_editors(context):
images = []
for area in context.screen.areas:
        if area.type != 'IMAGE_EDITOR':
continue
for space in area.spaces:
            if space.type != 'IMAGE_EDITOR':
continue
if space.image:
images.append(space.image)
area.tag_redraw()
return(images)
# calculate for 3d-view =>OK!
def sync_calc_callback(self, context, area, region):
mid_x = region.width/2.0
mid_y = region.height/2.0
width = region.width
height = region.height
region_3d = False
for space in area.spaces:
if space.type == 'VIEW_3D':
region_3d = space.region_3d
if not region_3d:
return
view_mat = region_3d.perspective_matrix
ob_mat = context.active_object.matrix_world
total_mat = view_mat * ob_mat
mesh = context.active_object.data
def transform_loc(loc):
vec = total_mat * loc
vec = mathutils.Vector([vec[0]/vec[3], vec[1]/vec[3], vec[2]/vec[3]])
x = int(mid_x + vec[0]*width/2.0)
y = int(mid_y + vec[1]*height/2.0)
return([x, y])
# vertices
locs = [mesh.vertices[v].co.to_4d() for v in self.overlay_vertices]
self.position_vertices = []
for loc in locs:
self.position_vertices.append(transform_loc(loc))
# edges
locs = [[mesh.vertices[mesh.edges[edge].vertices[0]].co.to_4d(),
mesh.vertices[mesh.edges[edge].vertices[1]].co.to_4d()] \
for edge in self.overlay_edges]
self.position_edges = []
for v1, v2 in locs:
self.position_edges.append(transform_loc(v1))
self.position_edges.append(transform_loc(v2))
    # faces (assumption: Mesh.faces was renamed to Mesh.polygons in the Blender 2.6x
    # API, so the quads are read through mesh.polygons here)
    locs = [[mesh.vertices[mesh.polygons[face].vertices[0]].co.to_4d(),
             mesh.vertices[mesh.polygons[face].vertices[1]].co.to_4d(),
             mesh.vertices[mesh.polygons[face].vertices[2]].co.to_4d(),
             mesh.vertices[mesh.polygons[face].vertices[3]].co.to_4d(),] \
            for face in self.overlay_faces]
self.position_faces = []
for v1, v2, v3, v4 in locs:
self.position_faces.append(transform_loc(v1))
self.position_faces.append(transform_loc(v2))
self.position_faces.append(transform_loc(v3))
self.position_faces.append(transform_loc(v4))
# draw in 3d-view =>OK!
def sync_draw_callback(self, context):
# polling
    if context.mode != "EDIT_MESH":
return
# draw vertices
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glPointSize(4)
bgl.glBegin(bgl.GL_POINTS)
for x, y in self.position_vertices:
bgl.glVertex2i(x, y)
bgl.glEnd()
# draw edges
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glLineWidth(1.5)
bgl.glBegin(bgl.GL_LINES)
for x, y in self.position_edges:
bgl.glVertex2i(x, y)
bgl.glEnd()
bgl.glLineWidth(1)
# draw faces
bgl.glEnable(bgl.GL_BLEND)
bgl.glColor4f(1.0, 0.0, 0.0, 0.3)
bgl.glBegin(bgl.GL_QUADS)
for x, y in self.position_faces:
bgl.glVertex2i(x, y)
bgl.glEnd()
bgl.glDisable(bgl.GL_BLEND)
# draw in image-editor =>OK!
def sync_draw_callback2(self, context):
# polling
    if context.mode != "EDIT_MESH":
return
# draw vertices
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glPointSize(6)
bgl.glBegin(bgl.GL_POINTS)
for x, y in self.position2_vertices:
bgl.glVertex2f(x, y)
bgl.glEnd()
# draw paint tool and blendmode in 3d-view =>?
def toolmode_draw_callback(self, context):
# polling
    if context.mode != 'PAINT_TEXTURE':
return
# draw
if context.region:
main_y = context.region.height - 32
else:
return
blend_dic = {"MIX": "Mix",
"ADD": "Add",
"SUB": "Subtract",
"MUL": "Multiply",
"LIGHTEN": "Lighten",
"DARKEN": "Darken",
"ERASE_ALPHA": "Erase Alpha",
"ADD_ALPHA": "Add Alpha",
"OVERLAY": "Overlay",
"HARDLIGHT": "Hard light",
"COLORBURN": "Color burn",
"LINEARBURN": "Linear burn",
"COLORDODGE": "Color dodge",
"SCREEN": "Screen",
"SOFTLIGHT": "Soft light",
"PINLIGHT": "Pin light",
"VIVIDLIGHT": "Vivid light",
"LINEARLIGHT": "Linear light",
"DIFFERENCE": "Difference",
"EXCLUSION": "Exclusion",
"HUE": "Hue",
"SATURATION": "Saturation",
"LUMINOSITY": "Luminosity",
"COLOR": "Color"
}
brush = context.tool_settings.image_paint.brush
text = brush.name + " - " + blend_dic[brush.blend]
# text in top-left corner
bgl.glColor3f(0.6, 0.6, 0.6)
blf.position(0, 21, main_y, 0)
blf.draw(0, text)
# text above brush
dt = time.time() - context.window_manager["tpp_toolmode_time"]
if dt < 1:
if "tpp_toolmode_brushloc" not in context.window_manager:
return
brush_x, brush_y = context.window_manager["tpp_toolmode_brushloc"]
brush_x -= blf.dimensions(0, text)[0] / 2
bgl.glColor4f(0.6, 0.6, 0.6, min(1.0, (1.0 - dt)*2))
blf.position(0, brush_x, brush_y, 0)
blf.draw(0, text)
# add ID-properties to window-manager
def init_props():
wm = bpy.context.window_manager
wm["tpp_automergeuv"] = 0
# remove ID-properties from window-manager
def remove_props():
wm = bpy.context.window_manager
if "tpp_automergeuv" in wm:
del wm["tpp_automergeuv"]
if "tpp_toolmode_time" in wm:
del wm["tpp_toolmode_time"]
if "tpp_toolmode_brushloc" in wm:
del wm["tpp_toolmode_brusloc"]
# calculate new snapped location based on start point (sx,sy)
# and current mouse point (mx,my). These coords appear to be
# in 2D screen coords, with the origin at:
# bottom-left, +x right, +y up.
# =>?
def do_snap( sx, sy, mx, my ):
# compute delta between current mouse position and
# start position
dx = mx - sx
dy = my - sy
adx = abs(dx)
ady = abs(dy)
# if delta is "close enough" to the diagonal
if abs( ady - adx ) < 0.5 * max(adx, ady):
# use a simple algorithm to snap based on horizontal
# distance (could use vertical distance, or could use
# radial distance but that would require more calcs).
if (dx > 0 and dy > 0) or (dx < 0 and dy < 0):
x = mx
y = sy + dx
elif (dx > 0 and dy < 0) or (dx < 0 and dy > 0):
x = mx
y = sy - dx
else:
x = mx
y = my
    elif ( adx > ady ):
        # movement is mostly horizontal, snap to the horizontal line y = sy
        x = mx
        y = sy
    else:
        # movement is mostly vertical, snap to the vertical line x = sx
        x = sx
        y = my
return (x, y)
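# Illustrative behaviour (coordinates made up):
#   do_snap(0, 0, 100, 3)  -> (100, 0)   near-horizontal stroke snaps to y = sy
#   do_snap(0, 0, 3, 100)  -> (0, 100)   near-vertical stroke snaps to x = sx
#   do_snap(0, 0, 90, 100) -> (90, 90)   near-diagonal stroke snaps to 45 degrees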
##########################################
# #
# Classes =>? #
# #
##########################################
# =>?
class ImageBuffer:
# based on script by Domino from BlenderArtists
# licensed GPL v2 or later
def __init__(self, image):
self.image = image
self.x, self.y = self.image.size
self.buffer = list(self.image.pixels)
def update(self):
self.image.pixels = self.buffer
def _index(self, x, y):
if x < 0 or y < 0 or x >= self.x or y >= self.y:
return None
return (x + y * self.x) * 4
def set_pixel(self, x, y, colour):
index = self._index(x, y)
if index is not None:
index = int(index)
self.buffer[index:index + 4] = colour
def get_pixel(self, x, y):
index = self._index(x, y)
if index is not None:
index = int(index)
return self.buffer[index:index + 4]
else:
return None
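# A minimal usage sketch (assumes an image named 'default' exists):
#   buf = ImageBuffer(bpy.data.images['default'])
#   buf.set_pixel(0, 0, (1.0, 0.0, 0.0, 1.0))  # paint one pixel opaque red
#   buf.update()                               # write the buffer back to the image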
# 2d bin packing =>?
class PackTree(object):
# based on python recipe by S W on ActiveState
# PSF license, 16 oct 2005. (GPL compatible)
def __init__(self, area):
if len(area) == 2:
area = (0,0,area[0],area[1])
self.area = area
def get_width(self):
return self.area[2] - self.area[0]
width = property(fget=get_width)
def get_height(self):
return self.area[3] - self.area[1]
height = property(fget=get_height)
def insert(self, area):
if hasattr(self, 'child'):
a = self.child[0].insert(area)
if a is None:
return self.child[1].insert(area)
else:
return a
area = PackTree(area)
if area.width <= self.width and area.height <= self.height:
self.child = [None,None]
self.child[0] = PackTree((self.area[0]+area.width, self.area[1], self.area[2], self.area[1] + area.height))
self.child[1] = PackTree((self.area[0], self.area[1]+area.height, self.area[2], self.area[3]))
return PackTree((self.area[0], self.area[1], self.area[0]+area.width, self.area[1]+area.height))
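# A minimal usage sketch: pack two rectangles into a 256x256 area
#   root = PackTree((256, 256))
#   slot_a = root.insert((128, 64))  # -> PackTree with area (0, 0, 128, 64)
#   slot_b = root.insert((64, 64))   # packed into the strip to the right of slot_a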
##########################################
# #
# Class Operators #
# #
##########################################
class AddDefaultImage(Operator):
'''Create and assign a new default image to the object'''
bl_idname = "object.add_default_image"
bl_label = "Add default image"
@classmethod
def poll(cls, context):
return(context.active_object and context.active_object.type=='MESH')
def invoke(self, context, event):
ob = context.active_object
mat = bpy.data.materials.new("default")
#Add texture to the mat
tex = bpy.data.textures.new("default", 'IMAGE')
img = bpy.data.images.new("default", 1024, 1024, alpha=True)
ts = mat.texture_slots.add()
tex.image = img
ts.texture = tex
ob.data.materials.append(mat)
return {'FINISHED'}
class AutoMergeUV(Operator):
'''Have UV Merge enabled by default for merge actions'''
bl_idname = "paint.auto_merge_uv"
bl_label = "AutoMerge UV"
def invoke(self, context, event):
wm = context.window_manager
if "tpp_automergeuv" not in wm:
init_props()
wm["tpp_automergeuv"] = 1 - wm["tpp_automergeuv"]
km = bpy.context.window_manager.keyconfigs.default.keymaps['Mesh']
for kmi in km.keymap_items:
if kmi.idname == "mesh.merge":
kmi.properties.uvs = wm["tpp_automergeuv"]
return {'FINISHED'}
class MakeBrushImageTexture(Operator): #class command
bl_label = "New Texture from Image"
bl_idname = "gizmo.image_texture"
filepath = bpy.props.StringProperty(subtype="FILE_PATH")
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def execute(self,context):
tex = bpy.data.textures.new("ImageTexture",'NONE')
tex.use_nodes = True
remove = tex.node_tree.nodes[1]
tex.node_tree.nodes.remove(remove)
tex.node_tree.nodes.new("TextureNodeImage")
tex.node_tree.links.new(tex.node_tree.nodes[0].inputs[0],tex.node_tree.nodes[1].outputs[0])
i = bpy.data.images.load(self.filepath)
tex.node_tree.nodes[1].image = i
bpy.context.tool_settings.image_paint.brush.texture = tex
tex.node_tree.nodes[1].location = [0,50]
tex.node_tree.nodes[0].location = [200,50]
if bpy.context.mode == 'SCULPT':
bpy.context.tool_settings.sculpt.brush.texture = tex
elif bpy.context.mode == 'PAINT_VERTEX':
bpy.context.tool_settings.vertex_paint.brush.texture = tex
#elif bpy.context.mode == 'PAINT_WEIGHT':
# bpy.context.tool_settings.weight_paint.brush.texture = tex
elif bpy.context.mode == 'PAINT_TEXTURE':
bpy.context.tool_settings.image_paint.brush.texture = tex
        return {'FINISHED'}  # operators must return a valid status set
class MakeBrushImageTextureMask(Operator): #class command
bl_label = "New Mask Texture from Image"
bl_idname = "gizmo.image_texture_mask"
filepath = bpy.props.StringProperty(subtype="FILE_PATH")
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def execute(self,context):
tex = bpy.data.textures.new("ImageTextureMask",'NONE')
tex.use_nodes = True
remove = tex.node_tree.nodes[1]
tex.node_tree.nodes.remove(remove)
tex.node_tree.nodes.new("TextureNodeImage")
tex.node_tree.nodes.new("TextureNodeRGBToBW")
tex.node_tree.links.new(tex.node_tree.nodes[0].inputs[0],tex.node_tree.nodes[2].outputs[0])
tex.node_tree.links.new(tex.node_tree.nodes[2].inputs[0],tex.node_tree.nodes[1].outputs[0])
tex.node_tree.nodes[1].location = [0,50]
tex.node_tree.nodes[2].location = [200,50]
tex.node_tree.nodes[0].location = [400,50]
i = bpy.data.images.load(self.filepath)
tex.node_tree.nodes[1].image = i
#if bpy.context.mode == 'SCULPT':
# bpy.context.tool_settings.sculpt.brush.mask_texture = tex
#elif bpy.context.mode == 'PAINT_VERTEX':
# bpy.context.tool_settings.vertex_paint.brush.mask_texture = tex
#elif bpy.context.mode == 'PAINT_WEIGHT':
# bpy.context.tool_settings.weight_paint.brush.mask_texture = tex
if bpy.context.mode == 'PAINT_TEXTURE':
bpy.context.tool_settings.image_paint.brush.mask_texture = tex
        return {'FINISHED'}  # operators must return a valid status set
class BrushPopup(Operator):
bl_idname = "view3d.brush_popup"
bl_label = "Brush settings"
bl_options = {'REGISTER', 'UNDO'}
@staticmethod
def paint_settings(context):
toolsettings = context.tool_settings
if context.vertex_paint_object:
return toolsettings.vertex_paint
elif context.weight_paint_object:
return toolsettings.weight_paint
elif context.image_paint_object:
if (toolsettings.image_paint and toolsettings.image_paint.detect_data()):
return toolsettings.image_paint
return None
return None
@staticmethod
def unified_paint_settings(parent, context):
ups = context.tool_settings.unified_paint_settings
parent.label(text="Unified Settings:")
row = parent.row()
row.prop(ups, "use_unified_size", text="Size")
row.prop(ups, "use_unified_strength", text="Strength")
if context.weight_paint_object:
parent.prop(ups, "use_unified_weight", text="Weight")
elif context.vertex_paint_object or context.image_paint_object:
parent.prop(ups, "use_unified_color", text="Color")
else:
parent.prop(ups, "use_unified_color", text="Color")
@staticmethod
def prop_unified_size(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_size else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_strength(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_strength else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_weight(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_weight else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_color(parent, context, brush, prop_name, text=""):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_color else brush
parent.prop(ptr, prop_name, text=text)
@staticmethod
def prop_unified_color_picker(parent, context, brush, prop_name, value_slider=True):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_color else brush
parent.template_color_picker(ptr, prop_name, value_slider=value_slider)
def check(self, context):
'''
settings = self.paint_settings(context)
brush_updated = settings.brush.is_updated
if brush_updated:
return True
'''
return True
    @classmethod
    def poll(cls, context):
        obj = context.active_object
        if obj is not None:
            A = obj.type == 'MESH'
            B = context.mode in {'PAINT_TEXTURE', 'PAINT_VERTEX', 'PAINT_WEIGHT'}
            return A and B
        return False
def brush_texpaint_common(self, layout, context, brush, settings, projpaint=False):
capabilities = brush.image_paint_capabilities
col = layout.column()
if brush.image_tool in {'DRAW', 'FILL'}:
if brush.blend not in {'ERASE_ALPHA', 'ADD_ALPHA'}:
if not brush.use_gradient:
self.prop_unified_color_picker(col, context, brush, "color", value_slider=True)
if settings.palette:
col.template_palette(settings, "palette", color=True)
if brush.use_gradient:
col.label("Gradient Colors")
col.template_color_ramp(brush, "gradient", expand=True)
                    if brush.image_tool != 'FILL':
col.label("Background Color")
row = col.row(align=True)
self.prop_unified_color(row, context, brush, "secondary_color", text="")
if brush.image_tool == 'DRAW':
col.prop(brush, "gradient_stroke_mode", text="Mode")
if brush.gradient_stroke_mode in {'SPACING_REPEAT', 'SPACING_CLAMP'}:
col.prop(brush, "grad_spacing")
elif brush.image_tool == 'FILL':
col.prop(brush, "gradient_fill_mode")
else:
row = col.row(align=True)
self.prop_unified_color(row, context, brush, "color", text="")
if brush.image_tool == 'FILL' and not projpaint:
col.prop(brush, "fill_threshold")
else:
self.prop_unified_color(row, context, brush, "secondary_color", text="")
row.separator()
row.operator("paint.brush_colors_flip", icon='FILE_REFRESH', text="")
elif brush.image_tool == 'SOFTEN':
col = layout.column(align=True)
col.row().prop(brush, "direction", expand=True)
col.separator()
col.prop(brush, "sharp_threshold")
if not projpaint:
col.prop(brush, "blur_kernel_radius")
col.separator()
col.prop(brush, "blur_mode")
elif brush.image_tool == 'MASK':
col.prop(brush, "weight", text="Mask Value", slider=True)
elif brush.image_tool == 'CLONE':
col.separator()
if projpaint:
if settings.mode == 'MATERIAL':
col.prop(settings, "use_clone_layer", text="Clone from paint slot")
elif settings.mode == 'IMAGE':
col.prop(settings, "use_clone_layer", text="Clone from image/UV map")
if settings.use_clone_layer:
ob = context.active_object
col = layout.column()
if settings.mode == 'MATERIAL':
if len(ob.material_slots) > 1:
col.label("Materials")
col.template_list("MATERIAL_UL_matslots", "",
ob, "material_slots",
ob, "active_material_index", rows=2)
mat = ob.active_material
if mat:
col.label("Source Clone Slot")
col.template_list("TEXTURE_UL_texpaintslots", "",
mat, "texture_paint_images",
mat, "paint_clone_slot", rows=2)
elif settings.mode == 'IMAGE':
mesh = ob.data
clone_text = mesh.uv_texture_clone.name if mesh.uv_texture_clone else ""
col.label("Source Clone Image")
col.template_ID(settings, "clone_image")
col.label("Source Clone UV Map")
col.menu("VIEW3D_MT_tools_projectpaint_clone", text=clone_text, translate=False)
else:
col.prop(brush, "clone_image", text="Image")
col.prop(brush, "clone_alpha", text="Alpha")
col.separator()
if capabilities.has_radius:
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
if capabilities.has_space_attenuation:
row.prop(brush, "use_space_attenuation", toggle=True, icon_only=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
if brush.image_tool in {'DRAW', 'FILL'}:
col.separator()
col.prop(brush, "blend", text="Blend")
col = layout.column()
# use_accumulate
if capabilities.has_accumulate:
col = layout.column(align=True)
col.prop(brush, "use_accumulate")
if projpaint:
col.prop(brush, "use_alpha")
col.prop(brush, "use_gradient")
col.separator()
col.template_ID(settings, "palette", new="palette.new")
def draw(self, context):
# Init values
toolsettings = context.tool_settings
settings = self.paint_settings(context)
brush = settings.brush
ipaint = toolsettings.image_paint
layout = self.layout
# Stroke mode
col = layout.column()
col.prop(brush, "stroke_method", text="")
if brush.use_anchor:
col.separator()
col.prop(brush, "use_edge_to_edge", "Edge To Edge")
if brush.use_airbrush:
col.separator()
col.prop(brush, "rate", text="Rate", slider=True)
if brush.use_space:
col.separator()
row = col.row(align=True)
row.prop(brush, "spacing", text="Spacing")
row.prop(brush, "use_pressure_spacing", toggle=True, text="")
if brush.use_line or brush.use_curve:
col.separator()
row = col.row(align=True)
row.prop(brush, "spacing", text="Spacing")
if brush.use_curve:
col.separator()
col.template_ID(brush, "paint_curve", new="paintcurve.new")
col.operator("paintcurve.draw")
else:
col.separator()
row = col.row(align=True)
row.prop(brush, "use_relative_jitter", icon_only=True)
if brush.use_relative_jitter:
row.prop(brush, "jitter", slider=True)
else:
row.prop(brush, "jitter_absolute")
row.prop(brush, "use_pressure_jitter", toggle=True, text="")
col = layout.column()
col.separator()
if brush.brush_capabilities.has_smooth_stroke:
col.prop(brush, "use_smooth_stroke")
sub = col.column()
sub.active = brush.use_smooth_stroke
sub.prop(brush, "smooth_stroke_radius", text="Radius", slider=True)
sub.prop(brush, "smooth_stroke_factor", text="Factor", slider=True)
layout.prop(settings, "input_samples")
# Curve stroke
col = layout.column(align=True)
settings = self.paint_settings(context)
brush = settings.brush
layout.template_curve_mapping(brush, "curve", brush=True)
col = layout.column(align=True)
row = col.row(align=True)
row.operator("brush.curve_preset", icon='SMOOTHCURVE', text="").shape = 'SMOOTH'
row.operator("brush.curve_preset", icon='SPHERECURVE', text="").shape = 'ROUND'
row.operator("brush.curve_preset", icon='ROOTCURVE', text="").shape = 'ROOT'
row.operator("brush.curve_preset", icon='SHARPCURVE', text="").shape = 'SHARP'
row.operator("brush.curve_preset", icon='LINCURVE', text="").shape = 'LINE'
row.operator("brush.curve_preset", icon='NOCURVE', text="").shape = 'MAX'
        # Symmetry mode
col = layout.column(align=True)
row = col.row(align=True)
row.prop(ipaint, "use_symmetry_x", text="X", toggle=True)
row.prop(ipaint, "use_symmetry_y", text="Y", toggle=True)
row.prop(ipaint, "use_symmetry_z", text="Z", toggle=True)
# imagepaint tool operate buttons
col = layout.split().column()
col.template_ID_preview(settings, "brush", new="brush.add", rows=3, cols=8)
########################################################################
# Texture Paint Mode #
if context.image_paint_object and brush:
self.brush_texpaint_common( layout, context, brush, settings, True)
########################################################################
# Weight Paint Mode #
elif context.weight_paint_object and brush:
col = layout.column()
row = col.row(align=True)
self.prop_unified_weight(row, context, brush, "weight", slider=True, text="Weight")
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
col.prop(brush, "vertex_tool", text="Blend")
if brush.vertex_tool == 'BLUR':
col.prop(brush, "use_accumulate")
col.separator()
col = layout.column()
col.prop(toolsettings, "use_auto_normalize", text="Auto Normalize")
col.prop(toolsettings, "use_multipaint", text="Multi-Paint")
########################################################################
# Vertex Paint Mode #
elif context.vertex_paint_object and brush:
col = layout.column()
self.prop_unified_color_picker(col, context, brush, "color", value_slider=True)
if settings.palette:
col.template_palette(settings, "palette", color=True)
self.prop_unified_color(col, context, brush, "color", text="")
col.separator()
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
col.separator()
col.prop(brush, "vertex_tool", text="Blend")
col.separator()
col.template_ID(settings, "palette", new="palette.new")
def invoke(self, context, event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
wm = context.window_manager
return wm.invoke_props_dialog(self, width=160)
def execute(self, context):
return {'FINISHED'}
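# Usage sketch (assumption: the operator is registered like any other add-on
# class). The popup is intended to be bound to a shortcut or invoked from a
# script while in a paint mode:
#   bpy.ops.view3d.brush_popup('INVOKE_DEFAULT')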
class TexturePopup(Operator):
bl_idname = "view3d.texture_popup"
bl_label = "Textures and Mask Textures settings"
bl_options = {'REGISTER', 'UNDO'}
    toggleMenu = bpy.props.BoolProperty(default=True)  # toggle between the Texture and Mask menus
def check(self, context):
return True
    @classmethod
    def poll(cls, context):
        obj = context.active_object
        if obj is not None:
            A = obj.type == 'MESH'
            B = context.mode == 'PAINT_TEXTURE'
            return A and B
        return False
def draw(self, context):
# Init values
toolsettings = context.tool_settings
brush = toolsettings.image_paint.brush
tex_slot = brush.texture_slot
mask_tex_slot = brush.mask_texture_slot
unified = toolsettings.unified_paint_settings
settings = toolsettings.image_paint
# textures panel
layout = self.layout
# Parameter Toggle Menu
_TITLE = 'TEXTURES' if self.toggleMenu else 'MASKS'
_ICON = 'TEXTURE' if self.toggleMenu else 'MOD_MASK'
Menu = layout.row()
Menu.prop(self, "toggleMenu", text=_TITLE, icon=_ICON)
if self.toggleMenu:
col = layout.column() #TEXTURES
col.template_ID_preview(brush, "texture", new="texture.new", \
rows=3, cols=8)
layout.label(text="Brush Mapping:")
# texture_map_mode
layout.row().prop(tex_slot, "tex_paint_map_mode", text="")
layout.separator()
if tex_slot.map_mode == 'STENCIL':
if brush.texture and brush.texture.type == 'IMAGE':
layout.operator("brush.stencil_fit_image_aspect")
layout.operator("brush.stencil_reset_transform")
# angle and texture_angle_source
if tex_slot.has_texture_angle:
col = layout.column()
col.label(text="Angle:")
col.prop(tex_slot, "angle", text="")
if tex_slot.has_texture_angle_source:
col.prop(tex_slot, "use_rake", text="Rake")
if brush.brush_capabilities.has_random_texture_angle and tex_slot.has_random_texture_angle:
col.prop(tex_slot, "use_random", text="Random")
if tex_slot.use_random:
col.prop(tex_slot, "random_angle", text="")
# scale and offset
split = layout.split()
split.prop(tex_slot, "offset")
split.prop(tex_slot, "scale")
row = layout.row()
row.operator(MakeBrushImageTexture.bl_idname)
else:
col = layout.column() #MASK TEXTURE
col.template_ID_preview(brush, "mask_texture", new="texture.new", \
rows=3, cols=8)
layout.label(text="Mask Mapping:")
# map_mode
layout.row().prop(mask_tex_slot, "mask_map_mode", text="")
layout.separator()
if mask_tex_slot.map_mode == 'STENCIL':
if brush.mask_texture and brush.mask_texture.type == 'IMAGE':
layout.operator("brush.stencil_fit_image_aspect").mask = True
layout.operator("brush.stencil_reset_transform").mask = True
col = layout.column()
col.prop(brush, "use_pressure_masking", text="")
# angle and texture_angle_source
if mask_tex_slot.has_texture_angle:
col = layout.column()
col.label(text="Angle:")
col.prop(mask_tex_slot, "angle", text="")
if mask_tex_slot.has_texture_angle_source:
col.prop(mask_tex_slot, "use_rake", text="Rake")
if brush.brush_capabilities.has_random_texture_angle and mask_tex_slot.has_random_texture_angle:
col.prop(mask_tex_slot, "use_random", text="Random")
if mask_tex_slot.use_random:
col.prop(mask_tex_slot, "random_angle", text="")
# scale and offset
split = layout.split()
split.prop(mask_tex_slot, "offset")
split.prop(mask_tex_slot, "scale")
row = layout.row()
row.operator(MakeBrushImageTextureMask.bl_idname)
def invoke(self, context, event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
return context.window_manager.\
invoke_props_dialog(self, width=160)
def execute(self, context):
return {'FINISHED'}
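# Usage sketch (same assumption as the brush popup above):
#   bpy.ops.view3d.texture_popup('INVOKE_DEFAULT')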
class SelectVertgroup(bpy.types.Operator):
"""Select Vertgroup"""
bl_idname = "object.select_vgroup"
bl_label = "Select VGroup"
bl_options = { 'REGISTER', 'UNDO' }
def execute(self, context):
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.object.vertex_group_select()#select current active vgroup
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.paint.texture_paint_toggle()#Texpaint
bpy.context.object.data.use_paint_mask = True #set face select masking on in case we forgot
return {'FINISHED'}
class DeselectVertgroup(bpy.types.Operator):
"""Deselect Vertgroup"""
bl_idname = "object.deselect_vgroup"
bl_label = "Deselect VGroup"
bl_options = { 'REGISTER', 'UNDO' }
def execute(self, context):
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.object.vertex_group_deselect()#select current active vgroup
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.paint.texture_paint_toggle()#Texpaint
bpy.context.object.data.use_paint_mask = True #set face select masking on in case we forgot
return {'FINISHED'}
class Slots_projectpaint(Operator):
bl_idname = "slots.projectpaint"
bl_label = "Slots & VGroups"
bl_options = {'REGISTER', 'UNDO'}
def check(self, context):
return True
@classmethod
def poll(cls, context):
brush = context.tool_settings.image_paint.brush
ob = context.active_object
if (brush is not None and ob is not None):
A = context.active_object.type == 'MESH'
B = context.mode == 'PAINT_TEXTURE'
return A and B
def draw(self, context):
settings = context.tool_settings.image_paint
ob = context.active_object
layout = self.layout
col = layout.column()
col.separator()
col.operator("image.save_dirty", text="Save All Images")
layout = self.layout
ob = context.object
group = ob.vertex_groups.active
rows = 2
if group:
rows = 4
row = layout.row()
row.template_list("MESH_UL_vgroups", "", ob, "vertex_groups", ob.vertex_groups, "active_index", rows=rows)
col = row.column(align=True)
col.operator("object.vertex_group_add", icon='ZOOMIN', text="")
col.operator("object.vertex_group_remove", icon='ZOOMOUT', text="").all = False
col.menu("MESH_MT_vertex_group_specials", icon='DOWNARROW_HLT', text="")
if group:
col.separator()
col.operator("object.vertex_group_move", icon='TRIA_UP', text="").direction = 'UP'
col.operator("object.vertex_group_move", icon='TRIA_DOWN', text="").direction = 'DOWN'
if ob.vertex_groups and (ob.mode == 'EDIT' or (ob.mode == 'WEIGHT_PAINT' and ob.type == 'MESH' and ob.data.use_paint_mask_vertex)):
row = layout.row()
sub = row.row(align=True)
sub.operator("object.vertex_group_assign", text="Assign")
sub.operator("object.vertex_group_remove_from", text="Remove")
sub = row.row(align=True)
sub.operator("object.vertex_group_select", text="Select")
sub.operator("object.vertex_group_deselect", text="Deselect")
layout.prop(context.tool_settings, "vertex_group_weight", text="Weight")
#row = layout.row()
row = layout.row(align=True)
row.operator("object.select_vgroup", text = "Select VGroup", icon = 'ROTACTIVE')
#row = layout.column()
row.operator("object.deselect_vgroup", text = "Deselect VGroup", icon = 'ROTACTIVE')
layout = self.layout
col = layout.column()
col.label("Painting Mode")
col.prop(settings, "mode", text="")
col.separator()
if settings.mode == 'MATERIAL':
if len(ob.material_slots) > 1:
col.label("Materials")
col.template_list("MATERIAL_UL_matslots", "layers",
ob, "material_slots",
ob, "active_material_index", rows=2)
mat = ob.active_material
if mat:
col.label("Available Paint Slots")
col.template_list("TEXTURE_UL_texpaintslots", "",
mat, "texture_paint_images",
mat, "paint_active_slot", rows=2)
if mat.texture_paint_slots:
slot = mat.texture_paint_slots[mat.paint_active_slot]
else:
slot = None
if (not mat.use_nodes) and context.scene.render.engine in {'BLENDER_RENDER', 'BLENDER_GAME'}:
row = col.row(align=True)
row.operator_menu_enum("paint.add_texture_paint_slot", "type")
row.operator("paint.delete_texture_paint_slot", text="", icon='X')
if slot:
col.prop(mat.texture_slots[slot.index], "blend_type")
col.separator()
                if slot and slot.index != -1:
col.label("UV Map")
col.prop_search(slot, "uv_layer", ob.data, "uv_textures", text="")
elif settings.mode == 'IMAGE':
mesh = ob.data
uv_text = mesh.uv_textures.active.name if mesh.uv_textures.active else ""
col.label("Canvas Image")
col.template_ID(settings, "canvas")
col.operator("image.new", text="New").gen_context = 'PAINT_CANVAS'
col.label("UV Map")
col.menu("VIEW3D_MT_tools_projectpaint_uvlayer", text=uv_text, translate=False)
def invoke(self, context,event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
return context.window_manager.invoke_props_dialog(self, width=240)
def execute(self, context):
return {'FINISHED'}
class ChangeSelection(Operator):
'''Select more or less vertices/edges/faces, connected to the original selection'''
bl_idname = "paint.change_selection"
bl_label = "Change selection"
    mode = bpy.props.EnumProperty(name="Mode",
        items=(("more", "More", "Select more vertices/edges/faces"),
               ("less", "Less", "Select less vertices/edges/faces")),
        description="Choose whether the selection should be increased or decreased",
        default='more')
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
bpy.ops.object.mode_set(mode='EDIT')
        if self.mode == 'more':
bpy.ops.mesh.select_more()
else: #self.mode == 'less'
bpy.ops.mesh.select_less()
bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
return {'FINISHED'}
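# Usage sketch (illustrative only): grow or shrink the masked selection from a
# script while texture painting:
#   bpy.ops.paint.change_selection(mode='more')
#   bpy.ops.paint.change_selection(mode='less')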
class DefaultMaterial(Operator):
'''Add a default dif/spec/normal material to an object'''
bl_idname = "object.default_material"
bl_label = "Default material"
@classmethod
def poll(cls, context):
object = context.active_object
if not object or not object.data:
return False
return object.type == 'MESH'
def invoke(self, context, event):
objects = context.selected_objects
for ob in objects:
            if not ob.data or ob.type != 'MESH':
continue
mat = bpy.data.materials.new(ob.name)
# diffuse texture
tex = bpy.data.textures.new(ob.name+"_DIFF", 'IMAGE')
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
ts.texture = tex
# specular texture
tex = bpy.data.textures.new(ob.name+"_SPEC", 'IMAGE')
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
ts.use_map_color_diffuse = False
ts.use_map_specular = True
ts.texture = tex
# normal texture
tex = bpy.data.textures.new(ob.name+"_NORM", 'IMAGE')
tex.use_normal_map = True
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
            ts.use_map_color_diffuse = False
            # Inferred completion -- the source file was truncated here; the
            # lines below mirror the diffuse/specular slots above.
            ts.use_map_normal = True
            ts.texture = tex
            ob.data.materials.append(mat)
        return {'FINISHED'}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014, 2015 Patrick Moran for Verizon
#
# Distributes WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License. If not, see <http://www.gnu.org/licenses/>.
from collections import deque
import g_config
import g_eon_api_bridge
# from g_graphics import plot_assets
import time
import logging
import json
from g_lat_lon_distance import lat_lon_distance, move_to_lat_lon, compute_resolution
from sortedcontainers import SortedDict
import pickle
import copy
import pandas
from numpy import int64, fmax, argsort, array, interp, linspace, diff, random
import arrow
import Queue
import os
import threading
ON = 1
OFF = 0
class GroomingMessageHandler(threading.Thread):
def __init__(self,
incoming_q,
incoming_queue_lock,
outgoing_q,
outgoing_queue_lock,
module_instance_name='Unnamed',
shared_data=None, shared_data_lock=None):
self.incoming_rabbit_mq = incoming_q
self.incoming_queue_lock = incoming_queue_lock
self.outgoing_q = outgoing_q
self.outgoing_queue_lock = outgoing_queue_lock
self.my_local_logger = logging.getLogger(module_instance_name)
self.my_local_logger.setLevel(logging.DEBUG)
self.local_q = deque()
self.eon_api_bridge = g_eon_api_bridge.EonApiBridge()
self.handle_queue = False
self.instance_name = module_instance_name
# This is used to run the main loop
self.run_enable = True
self.shared_data = shared_data
self.shared_data_lock = shared_data_lock
self.start_time = 0
self.run_start_time = time.time()
self.groomer_state = "0:IDLE" # Used to determine the current state of this thread in a multi-threaded env
self.groom_run_state = "0:IDLE" # Used to determine the current run mode of this thread
self.idle_count = 0
self.end_time = 0
self.query_count = 0
self.asset_dictionary = {}
self.working_radius = g_config.START_RADIUS # This will hold the radius units 0.12
self.cell_collection_set = set()
self.resolution = compute_resolution(self.working_radius)
self.cell_count = 0
self.utility_region = g_config.UTILITY_REGION
self.ttl = g_config.TTL_MAX
self.SHOW_PLOTS = False
self.cell_time_event = False
threading.Thread.__init__(self)
@staticmethod
def check_message_payload(dequeued_item):
"""
        This method checks that the message payload keys match the required (specified) keys
        :return: False if any key is missing, otherwise True
"""
key_array = ["dateTime",
"payload",
"messageType"]
# Note that the "ttl" key (and others) may be present but its not checked here!
for key in key_array:
if key not in dequeued_item.keys():
return False
key_array = ["zoomR",
"spatial",
"circuitID",
"reputationEnabled",
"assetID",
"temporal",
"outageTime",
"company",
"votes",
"zoomT",
"longitude",
"latitude"]
for key in key_array:
if key not in dequeued_item["payload"].keys():
return False
return True
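    # Example of a message that passes check_message_payload (illustrative
    # placeholder values, not real data; the coordinates follow the swagger
    # test example used later in this module):
    # {
    #     "dateTime": 1430452800000,
    #     "messageType": "Query",
    #     "queryGuid": "46f7655c-9160-4c08-b272-59c32232ba9f",
    #     "payload": {
    #         "zoomR": 1, "spatial": '{"r":[1,1]}', "circuitID": "",
    #         "reputationEnabled": True, "assetID": "", "temporal": "[1,0]",
    #         "outageTime": 1430452800000, "company": "CEDRAFT", "votes": 3,
    #         "zoomT": 1, "longitude": -73.8773389, "latitude": 41.2693778
    #     }
    # }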
def process_incoming_rabbit_mq(self):
"""
        Processes the Rabbit MQ bus messages and handles each one depending on its type.
        If the type is Query, the message is put on the local queue for processing later.
"""
self.groomer_state = "3:PROCESS QUEUE"
lock_counter = 0
while not self.incoming_queue_lock.acquire(False):
self.my_local_logger.debug("Trying to acquire lock. Sleeping 0.05s.")
time.sleep(g_config.SLEEP_TIME)
lock_counter += 1
if lock_counter > 100:
self.my_local_logger.debug("Cant acquire incoming queue lock, returning")
self.my_local_logger.error("Unable to acquire lock in process_incoming_queue, returning!")
self.groomer_state = "4:PROCESS QUEUE LOCK ERROR"
return
while not self.incoming_rabbit_mq.empty():
self.my_local_logger.debug(
"Groomer says Incoming Rabbit MQ not empty, length is %d" % self.incoming_rabbit_mq.qsize())
self.my_local_logger.debug("Acquired lock")
# This is where the incoming grooming message is pulled off the Rabbit MQ.
dequeued_item = self.incoming_rabbit_mq.get()
if self.check_message_payload(dequeued_item):
self.my_local_logger.info("A %s type message was dequeued " %
dequeued_item['messageType'])
else:
self.my_local_logger.error("Message payload is malformed in process_incoming_queue, returning")
if self.incoming_queue_lock:
self.incoming_queue_lock.release()
self.my_local_logger.debug("GROOMER rabbit MQ lock was released")
self.my_local_logger.info("The rabbit MQ lock was released")
self.groomer_state = "5:PROCESS QUEUE MALFORMED"
return
            # Determine what the queue command type is and dispatch it.
if dequeued_item['messageType'] == 'Test':
# This is a dummy Test which is dropped for now.
pass
elif dequeued_item['messageType'] == 'Clear':
# Restore the previous results
pass
elif dequeued_item['messageType'] == 'Save':
# Save the current groom (filter) settings and kick off a new Utility wide groom process
# Grab the Query message type and stuff it in a local fifo queue
self.my_local_logger.debug("Save type message received")
self.my_local_logger.debug("query_guid = %s" % "None - missing on save") # dequeued_item['queryGuid'])
#######################################################
# Collect interesting payload information here
#######################################################
if "ttl" not in dequeued_item.keys():
dequeued_item["ttl"] = g_config.TTL_UTILITY_SPAN
self.local_q.append(dequeued_item)
self.my_local_logger.debug("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
self.my_local_logger.info("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
pass
elif dequeued_item['messageType'] == 'Query':
# Grab the Query message type and stuff it in a local fifo queue
self.my_local_logger.debug("Query type message received")
self.my_local_logger.debug("query_guid = %s" % dequeued_item['queryGuid'])
#######################################################
# Collect interesting payload information here
#######################################################
if "ttl" not in dequeued_item.keys():
dequeued_item["ttl"] = g_config.TTL_MAX
self.local_q.append(dequeued_item)
self.my_local_logger.debug("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
self.my_local_logger.info("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
else:
self.my_local_logger.error("incoming_rabbit_mq TYPE is a UNKNOWN")
if self.incoming_queue_lock:
self.incoming_queue_lock.release()
self.my_local_logger.debug("GROOMER rabbit MQ lock was released")
self.my_local_logger.info("The rabbit MQ lock was released")
self.my_local_logger.debug("process_incoming_rabbit_mq finished")
self.groomer_state = "0:IDLE"
def get_data_in_cell_area(self, cell_parameters, ttl):
"""
Ask the EON API for onts, circuits and transformers for a given lat, lon and radius
Returns a group of items that are inside the circle with a given center (lat, lon) and
radius.
Note: convert the time units in the ONT event list into minutes by dividing by 60000
        :param cell_parameters: a dict of query parameters ("latitude", "longitude", "radius", "outageTime", ...)
:param ttl: The time to live.
:return: this_cell # A hexagonal cell dictionary
this_cell = {'neighbors': [], # the 6 nearest neighbor cells
'assets': {}, # The utility assets including their lat and lon and events
'onts': {}, # Verizon's ONTs including their lat and lon and events
'state': '' # A string representing the state of this cell.
                     This is used for multi-threading purposes so that neighboring cells can see
                     what's going on.
'circuits': {} # This is a set of circuits in this cell. All assets on a circuit
are in the circuits list
'lat_lon': [] # The lat and lon array of the center of the cell
'radius': 1.00 # The radius of the circumscribed cell.
ont_items is a dictionary of {'lat_lon':[],'assets':[],'events':[]}
asset_items is a dictionary of {'lat_lon':[],'onts':[],'events':[]}
circuit_items is a dictionary of {'connected_items', asset_item_key}
where asset_item_key is a key entry in the asset_item dictionary
events is an array of 2 sets of events. events[0] is the "fail_time" and events[1] is the "restore_time"
        A call to the API is made in a loop to gather all items; here is a test of the API call:
The swagger test example is
http://10.123.0.27:8080/eon360/api/query
With a json payload of
{
"itemType":"ALL",
"circle": {
"unit": "MILES",
"longitude": -73.8773389,
"radius": 1.0,
"latitude": 41.2693778
},
"pageParameter": {
"page": 0,
"size": 100
}
}
This will return a data structure like this
dd['eligibility']['dataItems']
dd['alarm']['dataItems']
dd['utility']['dataItems']
"""
# query_guid = payload["query_guid"]
this_lat = cell_parameters["latitude"]
this_lon = cell_parameters["longitude"]
# utility = cell_parameters["company"]
groom_time = cell_parameters["outageTime"]
# circuit_id = cell_parameters["circuitID"]
# asset_id = cell_parameters["assetID"]
# votes = cell_parameters["votes"]
# spatial = cell_parameters["spatial"]
# temporal = cell_parameters["temporal"]
# reputation_ena = cell_parameters["reputationEnabled"]
# zoom_t = cell_parameters["zoomT"]
# zoom_r = cell_parameters["zoomR"]
this_radius = cell_parameters["radius"]
# units = cell_parameters["units"]
query_type = "ALL"
ont_serial_number_set = set()
ont_items = {}
asset_serial_number_set = set()
asset_items = {}
circuit_serial_number_set = set()
circuit_items = {}
# The six neighbor cells are initially set to be empty
# This a string quid and an angle (in degrees)
neighbor_array = [["", 0], ["", 60], ["", 120], ["", 180], ["", 240], ["", 300]]
this_cell = {'neighbors': neighbor_array,
'assets': {},
'onts': {},
'circuits': {},
'state': 'creating',
'lat_lon': [this_lat, this_lon],
'radius': this_radius,
'groom_time': groom_time,
'ttl': 0
}
page_number = 0
page_size = 20
query_parameter = json.dumps({"itemType": query_type,
"circle": {"longitude": this_lon,
"latitude": this_lat,
"radius": this_radius, "unit": g_config.RADIUS_UNITS},
"pageParameter": {"page": page_number, "size": page_size}})
self.my_local_logger.debug("Formed query parameter: %s" % query_parameter)
dd = self.eon_api_bridge.query_post_eon_data_30(query_parameter=query_parameter)
more_pages = True
# Loop here until no more utility components of the first collection are found
while more_pages and dd is not None:
# This is the ONTs loop through them and find all the ONTs in the area
for this_ont in dd['eligibility']['dataItems']:
ont_dictionary_keyword = this_ont['ontSerialNumber']
ont_serial_number_set.add(ont_dictionary_keyword)
if ont_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping this ont in eligibility list")
continue
ont_items[ont_dictionary_keyword] = {'lat_lon': [this_ont['latitude'], this_ont['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[ont_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[ont_dictionary_keyword]['assets'] = set()
for this_alarm in dd['alarm']['dataItems']:
alarm_dictionary_keyword = this_alarm['ontSerialNumber']
if alarm_dictionary_keyword not in ont_serial_number_set:
if alarm_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping this ONT in the alarm list")
continue
ont_serial_number_set.add(alarm_dictionary_keyword)
ont_items[alarm_dictionary_keyword] = {'lat_lon': [this_alarm['latitude'], this_alarm['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[alarm_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[alarm_dictionary_keyword]['assets'] = set()
if this_alarm['alarmReceiveTime']:
alarm_set = float(this_alarm['alarmReceiveTime']) # * 1e-3) / 60
ont_items[alarm_dictionary_keyword]['events'][0].add(alarm_set)
if this_alarm['alarmClearTime']:
alarm_clear = float(this_alarm['alarmClearTime']) # * 1e-3) / 60
ont_items[alarm_dictionary_keyword]['events'][1].add(alarm_clear)
# Now go through the assets and associate the assets to the ONTs and the ONTs to the assets
for this_item in dd['utility']['dataItems']:
asset_dictionary_keyword = this_item['transformerID']
if asset_dictionary_keyword not in asset_serial_number_set:
asset_serial_number_set.add(asset_dictionary_keyword)
asset_items[asset_dictionary_keyword] = {'lat_lon': [this_item['latitude'], this_item['longitude']]}
asset_items[asset_dictionary_keyword]['events'] = [set(), set()]
asset_items[asset_dictionary_keyword]['onts'] = set()
asset_items[asset_dictionary_keyword]['guid'] = this_item['guid']
asset_items[asset_dictionary_keyword]['serviceAddress'] = this_item['serviceAddress']
for this_ont in this_item['eligibilityList']:
ont_dictionary_keyword = this_ont['ontSerialNumber']
if ont_dictionary_keyword not in ont_serial_number_set:
ont_serial_number_set.add(ont_dictionary_keyword)
ont_items[ont_dictionary_keyword] = {
'lat_lon': [this_ont['latitude'], this_ont['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[ont_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[ont_dictionary_keyword]['assets'] = set()
# Skip the ONTs that don't have an installation.
if ont_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping the ONT listed on eligibility list in asset_id=%s" %
asset_dictionary_keyword)
self.my_local_logger.info("Skipping %s because it's status is PENDING INSTALL" %
asset_dictionary_keyword)
continue
# Stitch up the assets in the onts
ont_items[ont_dictionary_keyword]['assets'].add(asset_dictionary_keyword)
# Stitch up the onts in the assets
asset_items[asset_dictionary_keyword]['onts'].add(ont_dictionary_keyword)
circuit_dictionary_keyword = this_item['circuitID']
if circuit_dictionary_keyword not in circuit_serial_number_set:
                    # add the circuit item to the circuit_serial_number_set if needed
circuit_serial_number_set.add(circuit_dictionary_keyword)
# and create an empty set
circuit_items[circuit_dictionary_keyword] = {'connected_items': set()}
# Now add the data structure to the set
circuit_items[circuit_dictionary_keyword]['connected_items'].add(asset_dictionary_keyword)
###########################
# Look for the next page #
###########################
if (dd['utility']['pageTotalItems'] == page_size) or \
(dd['alarm']['pageTotalItems'] == page_size) or \
(dd['eligibility']['pageTotalItems'] == page_size):
self.my_local_logger.debug("Collecting next page for this message")
page_number += 1
more_pages = True
query_parameter = json.dumps({"itemType": query_type,
"circle": {"longitude": this_lon,
"latitude": this_lat,
"radius": this_radius,
"unit": g_config.RADIUS_UNITS},
"pageParameter": {"page": page_number, "size": page_size}})
dd = self.eon_api_bridge.query_post_eon_data_30(query_parameter=query_parameter)
else:
more_pages = False
this_cell['assets'] = asset_items
        # Go over the ONT set and see if there are any that don't have alarms. This might happen if there were
        # no alarms posted to this ONT because the main alarm ingestion loop failed for some reason. There will
        # still be alarms that are posted on the ONTs and those can be recovered here.
for this_ont in ont_items:
if len(ont_items[this_ont]['events'][0]) == 0 or len(ont_items[this_ont]['events'][1]) == 0:
# To find any ONTs that don't seem to have alarms make this call:
# where ONT_SERIAL_NUMBER is 00ABB96 in this example.
# http://10.123.0.27:8080/eon360/api/alarms?sortBy=alarmReceiveTime&ontSerialNumber=000ABB96&p=0&s=20
dd = self.eon_api_bridge.alarm_get_pons_nms_00(ont_serial_number=this_ont)
if dd:
if 'alarms' in dd.keys():
for this_alarm in dd['alarms']:
if this_alarm['alarmReceiveTime']:
alarm_set = float(this_alarm['alarmReceiveTime']) # * 1e-3) / 60
ont_items[this_ont]['events'][0].add(alarm_set)
self.my_local_logger.info("Adding an AlarmReceiveTime to the data")
if this_alarm['alarmClearTime']:
alarm_clear = float(this_alarm['alarmClearTime']) # * 1e-3) / 60
ont_items[this_ont]['events'][1].add(alarm_clear)
else:
self.my_local_logger.warning("No alarms found in call to alarm_get_pons_nms_00(ont_serial_number=%s)" % this_ont )
else:
self.my_local_logger.warning("Nothing returned from the API call")
this_cell['onts'] = ont_items
this_cell['circuits'] = circuit_items
this_cell['state'] = 'populated'
this_cell['ttl'] = ttl
self.my_local_logger.info("This CELL (radius= %3.3f %s @ lat=%f, lon=%f) has %d circuits, %d assets and %d onts." %
(this_radius, g_config.RADIUS_UNITS, this_lat, this_lon,
len(circuit_items), len(asset_items), len(ont_items))
)
# Note convert the time units into minutes by dividing by 60000
return this_cell
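    # Call sketch (illustrative values; the coordinates mirror the swagger test
    # example in the docstring above):
    #   cell = self.get_data_in_cell_area(
    #       {"latitude": 41.2693778, "longitude": -73.8773389,
    #        "radius": 0.12, "outageTime": 1430452800000},
    #       ttl=g_config.TTL_MAX)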
@staticmethod
def persist_cell_pickle(cell, filename=""):
"""
        :param cell: The cell structure that is persisted to disk
        :param filename: optional override for the generated cell file name
        :return:
"""
this_lat = cell['lat_lon'][0]
this_lon = cell['lat_lon'][1]
if this_lat < 0:
lat_str = ("%03.2f" % (float(round(-this_lat * 100)) / 100.0)).replace('.','m')
else:
lat_str = ("%03.2f" % (float(round(this_lat * 100)) / 100.0)).replace('.', 'p')
if this_lon < 0:
lon_str = ("%03.2f" % (float(round(-this_lon * 100)) / 100.0)).replace('.','m')
else:
lon_str = ("%03.2f" % (float(round(this_lon * 100)) / 100.0)).replace('.', 'p')
if filename == "":
filename = 'cell_' + lat_str + '_' + lon_str
filename += '.pck'
full_path = g_config.BASE_DIR + os.sep + g_config.PICKLES + os.sep + filename
with open(full_path, "w") as f: # write mode
pickle.dump(cell, f)
@staticmethod
def un_persist_cell_pickle(this_lat, this_lon):
"""
:param this_lat:
:param this_lon:
:return: cell
"""
if this_lat < 0:
lat_str = ("%03.2f" % (float(round(-this_lat * 100)) / 100.0)).replace('.','m')
else:
lat_str = ("%03.2f" % (float(round(this_lat * 100)) / 100.0)).replace('.', 'p')
if this_lon < 0:
lon_str = ("%03.2f" % (float(round(-this_lon * 100)) / 100.0)).replace('.','m')
else:
lon_str = ("%03.2f" % (float(round(this_lon * 100)) / 100.0)).replace('.', 'p')
filename = 'cell_' + lat_str + '_' + lon_str + '.pck'
        # Mirror the path logic in persist_cell_pickle so the cell is read back
        # from the same location it was written to.
        full_path = g_config.BASE_DIR + os.sep + g_config.PICKLES + os.sep + filename
        with open(full_path, "r") as f:  # read mode
            cell = pickle.load(f)
return cell
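    # Filename encoding example: lat=41.27, lon=-73.88 produces
    # 'cell_41p27_73m88.pck' -- 'p' replaces the decimal point of a positive
    # coordinate and 'm' that of a negative one.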
def temporal_filter(self, cell):
"""
:param cell:
        This method applies the filter model to each ONT in the cell and returns a filtered outage
        based on the alarm_condition (a value between 0 and 1).
        Start with alarm_condition = 0, which means no alarm (these are alarm_conditions for ALARMs).
        An epoch timestamp can be converted back to a date with pandas.to_datetime(t, unit='ms'),
        as in the debug logging below.
        In this context ON means power is on and OFF means power is off.
        t is in milliseconds. To convert to minutes divide by 1000 and by 60.
:return:
"""
self.cell_time_event = False
for this_ont in cell['onts']:
event_vector = {'t': [int64(g_config.ENGINE_BEGIN_TIME)], 'a': [ON]}
on_times = cell['onts'][this_ont]['events'][ON]
off_times = cell['onts'][this_ont]['events'][OFF]
if len(on_times) > 0:
for this_alarm in on_times:
event_vector['t'].append(this_alarm)
event_vector['a'].append(ON)
if len(off_times) > 0:
for this_alarm in off_times:
event_vector['t'].append(this_alarm)
event_vector['a'].append(OFF)
            # At this point we have a temporal vector of events for this ONT.
time_vector = array(event_vector['t'])
ind = argsort(time_vector)
power_state = array(event_vector['a'])[ind]
t = time_vector[ind]
# At this point the sorted time and alarm vectors are ready
# tw = t[t > t[-1] - config.ALARM_DETECT_WINDOW * 1000]
# aw = a[t > t[-1] - config.ALARM_DETECT_WINDOW * 1000]
# Deglitch the vectors now
            # To deglitch the time vector, take all the values that are ON, extend them by 5 minutes,
            # then add (or) them back into the time vector.
# time_of_alarm_condition = tw[-1] # The last time vector point (the sorted value)
# alarm_condition = aw[-1]
time_count = len(t)
deglitched_power_state = copy.copy(power_state)
# see for example http://pandas.pydata.org/pandas-docs/stable/timeseries.html
for i in range(time_count - 1):
if power_state[i] == OFF and power_state[i + 1] == ON:
if t[i + 1] < t[i] + g_config.DEGLITCH_TIME:
self.my_local_logger.debug(
"Deglitched the power at %s" % (pandas.to_datetime(t[i], unit='ms')))
deglitched_power_state[i] = ON
else:
self.my_local_logger.debug("off time is %f min (%f hours) (days %f)" % (
(t[i + 1] - t[i]) / 1000 / 60, (t[i + 1] - t[i]) / 1000 / 60 / 60,
(t[i + 1] - t[i]) / 1000 / 60 / 60 / 24))
power_state_array = []
time_array = []
for i in range(time_count-1):
time_array.append(t[i])
time_array.append(t[i+1] - g_config.MS_TIME_RESOLUTION) # something around 5 seconds
power_state_array.append(deglitched_power_state[i])
power_state_array.append(deglitched_power_state[i])
if deglitched_power_state[i] == ON:
self.my_local_logger.debug("power on at %s" % (pandas.to_datetime(t[i], unit='ms')))
if deglitched_power_state[i] == OFF:
self.my_local_logger.debug("power off at %s" % (pandas.to_datetime(t[i], unit='ms')))
time_array.append(t[-1])
power_state_array.append(deglitched_power_state[-1])
sample_time = cell['groom_time']
if sample_time > t[-1]:
self.my_local_logger.debug(
"sample time is after the end of time in the time event list, using interpolated value")
time_array.append(sample_time - g_config.MS_TIME_RESOLUTION)
power_state_array.append(deglitched_power_state[-1])
time_array_sec = [round(x / 1000) for x in time_array]
# time_domain_vector = [time_array, power_state_array] # column_stack((time_array,power_state_array))
            # Calculate a +/- 1 week interval sampled every 5 minutes around the groom time, unless the
            # groom time falls within the last week; in that case the two weeks preceding it are used to
            # compute the time vector.
            # This is done to allow the real-time groomer to run a bit faster than the interactive
            # groomer during the interp call.
# The arrow library produces timestamp values in seconds.
current_time = arrow.utcnow().to('US/Eastern')
a_week_ago = current_time.replace(weeks=-1)
sample_time_arrow = arrow.get(sample_time/1000)
if sample_time_arrow.timestamp < a_week_ago.timestamp:
# This is a grooming operation that fits in the 2 week span of time.
start_time = sample_time_arrow.replace(weeks=-1)
stop_time = sample_time_arrow.replace(weeks=1)
else:
start_time = sample_time_arrow.replace(weeks=-2)
stop_time = sample_time_arrow
# The time vector will be in seconds
# One minute = 60
# One hour = 60*60
# One day = 24*60*60
# One week = 7*24*60*60
# Five minute intervals are 5*60
delta_time = 5*60 # This is the sample interval of the time vector (Every 5 minutes)
number_of_points = (stop_time.timestamp - start_time.timestamp) / delta_time
sample_time_array = linspace(start_time.timestamp, stop_time.timestamp, number_of_points)
sample_power_array = interp(sample_time_array, time_array_sec, power_state_array)
time_domain_vector = [sample_time_array, sample_power_array]
reliability = sum(sample_power_array)/len(sample_power_array)
event_durations = []
event_times = []
if sample_power_array.min() == sample_power_array.max():
self.SHOW_PLOTS = False
else:
self.SHOW_PLOTS = True
if self.SHOW_PLOTS:
if not g_config.IS_DEPLOYED:
print "Reliability = %4.4f" % reliability
if reliability > 0.8:
self.cell_time_event = True
if not g_config.IS_DEPLOYED:
try:
import matplotlib.pyplot as plt
# plt.plot(time_array, power_state_array, 'o')
plt.plot(sample_time_array, sample_power_array, '-x')
plt.show(block=False)
                    except Exception:
print "Something went wrong with the matplotlib command, skipping!"
if (sample_power_array[0] > 0) and (sample_power_array[-1] > 0):
if not g_config.IS_DEPLOYED:
print "Diff the time vector to find the on and off times."
diff_sample_power_array = diff(sample_power_array)
index_on = diff_sample_power_array > 0
on_times = sample_time_array[index_on]
index_off = diff_sample_power_array < 0
off_times = sample_time_array[index_off]
if len(on_times) == len(off_times):
for k, t_off in enumerate(off_times):
                        # The power was off from t_off until the matching turn-on time, so the
                        # outage duration is on_times[k] - t_off.
power_fail_event_duration = on_times[k] - t_off
if not g_config.IS_DEPLOYED:
print "power fail event duration = %f" % power_fail_event_duration
event_durations.append(power_fail_event_duration)
event_times.append(t_off)
if not g_config.IS_DEPLOYED:
print "Found a %10.2f minute outage on %s" % (
(power_fail_event_duration/60),
arrow.get(t_off).format("MMMM DD, YYYY @ hh:mm A")
)
else:
self.my_local_logger.info('Power event edges are mismatched, skipping this: ')
else:
self.my_local_logger.info('Power event edges in the window are mismatched, skipping this: ')
else:
self.my_local_logger.info('Power event outage has low reliability, skipping this: ')
self.my_local_logger.info('temporal data for cell has %d points from %s to %s' % (
number_of_points, start_time, stop_time))
cell['onts'][this_ont]['temporal_filter'] = {'reliability': reliability,
'event_durations': event_durations,
'event_times': event_times,
'time_domain_vector': time_domain_vector}
return cell
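    # Deglitch rule sketch (toy numbers, not project data): an OFF sample whose
    # matching ON arrives within g_config.DEGLITCH_TIME is rewritten to ON.
    #   t     = [t0, t0 + gap]   # ms
    #   state = [OFF, ON]
    #   if gap < DEGLITCH_TIME: deglitched state -> [ON, ON]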
def spatial_filter(self, cell):
"""
The spatial filter does a filtering of the ont collection based on the asset called this_asset.
:param cell:
A cell that contains of onts along with their locations and states.
The onts values must have been filtered temporally first.
:return:
"""
if self.cell_time_event:
# Only append outages on assets for the cells that have events
if not g_config.IS_DEPLOYED:
print "An interesting time event has occurred in this cell..."
for this_ont in cell['onts']:
event_durations = cell['onts'][this_ont]['temporal_filter']['event_durations']
event_times = cell['onts'][this_ont]['temporal_filter']['event_times']
if not g_config.IS_DEPLOYED:
if this_ont == "0016FE13":
print "found an event"
for this_asset in cell['onts'][this_ont]['assets']:
if not g_config.IS_DEPLOYED:
if this_asset == "TR1000489404_108":
print "found a matching asset"
try:
event_activities = cell['assets'][this_asset]['spatial_filter']
except KeyError:
event_activities = {'distance': [], 'events': []}
if len(event_durations) > 0:
ont_lat = cell['onts'][this_ont]['lat_lon'][0]
ont_lon = cell['onts'][this_ont]['lat_lon'][1]
lat_lon = cell['assets'][this_asset]['lat_lon']
asset_lat = lat_lon[0]
asset_lon = lat_lon[1]
this_distance = lat_lon_distance(asset_lat, asset_lon, ont_lat, ont_lon, units='mi')
event_activities['distance'].append(this_distance)
event_activities['events'].append(
{'event_durations': event_durations, 'event_times': event_times}
)
cell['assets'][this_asset]['spatial_filter'] = event_activities
if not g_config.IS_DEPLOYED:
print " ...done with interesting cell."
return cell
def vote_on_assets(self, cell, temporal_data, spatial_data, voting_data):
"""
:param cell:
        :param temporal_data: the temporal filter specification (currently only logged)
        :param spatial_data: a JSON string with an 'r' array of spatial weights
        :param voting_data: an integer that is the number of votes to use
:return:
"""
try:
this_filter = json.loads(spatial_data)
total_counts = len(this_filter['r'])
weights = []
for i in range(total_counts):
weights.append(this_filter['r'][i])
except TypeError as e:
self.my_local_logger.error('Spatial data has a Type Error: %s, %s' % (spatial_data, e))
except ValueError as e:
self.my_local_logger.error('Spatial data has a ValueError: %s, %s' % (spatial_data, e))
self.my_local_logger.info('spatial data = %s', spatial_data)
self.my_local_logger.info('temporal data = %s', temporal_data)
if voting_data:
try:
number_of_votes = int(voting_data)
except ValueError as e:
                self.my_local_logger.error('Voting data has an error in the passed value %s' % e)
number_of_votes = 1
except TypeError as e:
self.my_local_logger.error('Voting data is not a string %s' % e)
number_of_votes = 1
else:
number_of_votes = 1
self.my_local_logger.info('Number of votes passed: %d' % number_of_votes)
for this_asset in cell['assets']:
cell['assets'][this_asset]['outage_events'] = None
try:
# these_distances = cell['assets'][this_asset]['spatial_filter']['distance']
these_events = cell['assets'][this_asset]['spatial_filter']['events']
except KeyError:
# print "No outages on this asset"
continue
if len(these_events) > 0:
if len(these_events) >= 1: # number_of_votes:
# This is where the filter will take place.
                    # these_events is an array; iterate over each of its event items.
try:
outage_events = cell['assets'][this_asset]['outage_events']
except KeyError:
outage_events = {'event_durations': [], 'event_times': []}
if outage_events is None:
outage_events = {'event_durations': [], 'event_times': []}
for this_event_dict in these_events:
for j, this_event in enumerate(this_event_dict['event_durations']):
outage_events['event_durations'].append(this_event)
outage_events['event_times'].append(this_event_dict['event_times'][j])
cell['assets'][this_asset]['outage_events'] = outage_events
return cell
def post_outage_on_asset(self, cell, payload):
"""
:param cell:
:param payload: this will be of the form
http://10.123.0.27:8080/eon360/api/utilities?p=0&s=20
"eonUtilityEntries": [
{
"id": "5508dacee4b0df5309df591e",
"version": 0,
#######################
## ADD THIS GUID
"guid": "46f7655c-9160-4c08-b272-59c32232ba9f",
#######################
"company": "CEDRAFT",
"serviceAddress": "{\"CE Map ID\": \"None\",
\"Municipality\": \"New Castle\",
\"Provenance\":\"Report A\",
\"Attached Assets\": [],
\"Next Hop\": \"PS302355612\",
\"Type\": \"HOUSE\",
\"Downstream\": \"None\",
\"Transformer Supply\": [\"TR302355616_T4\"],
\"Upstream\":\"PS302355612\",
\"Connections\": [],
\"Address\":\"10 VALLEY VIEW RD, Chappaqua NY, 10514-2532\",
\"Utility ID\": \"None\"}",
"errorCode": "0",
"circuitID": "10U2",
"transformerID": "HS01c902165608e5f12ce4c01c78c70415",
"eligibilityList": [
{
"id": "54a079aae4b040db636a2d95",
"version": 0,
"guid": "23697667-4810-4169-8802-46ad6efae3a3",
"company": "",
"ontSerialNumber": "59054969",
"errorCode": "0.91",
"alarmID": "CHPQNYCPOL1*LET-3*11*1*1",
"ontAddress": "8 Brookside Cir,Chappaqua,NY,10514",
"modelCoefficients": null,
"longitude": f-73.787811,
"latitude": 41.175064,
"createdAtTimestamp": 1419803050366,
"lastModifiedAtTimestamp": 1419803050366
},
"payload": {
"company": "CEDRAFT",
"outageTime": 1430452800000,
"longitude": lon,
"latitude": lat,
"circuitID": "",
"assetID": "",
"votes": 3,
"spatial": '{"r":[1,1]}',
"temporal": "[1,0;.8,24;.3, 60]",
"reputationEnabled": True,
"zoomT": 1,
"zoomR": 1,
"radius": 0.12,
"units": "MI"
},
The post must be of the form
{
"eventDuration": "long",
"guid": "",
"id": "",
"utility": {
"assetType": "",
"circuitID": "",
"company": "",
"outageID": "",
"transformerID": ""
},
"timeOfEvent": "Date",
"company": "",
"longitude": 0,
"internalUtilityGuid": "",
"latitude": 0,
"algorithm": "",
"version": "long"
}
:return:
"""
# circuit_id = ""
# First loop over all circuits:
try:
for this_circuit in cell['circuits']:
# Now loop over all the items on that circuit
for this_asset in cell['circuits'][this_circuit]['connected_items']:
asset_item = cell['assets'][this_asset]
outages = asset_item['outage_events']
# This is the form of an event (If there is one!)
# It will be None if there are no events otherwise it will be:
# 'event_durations': copy.deepcopy(these_events['event_durations']),
# 'event_times': copy.deepcopy(these_events['event_times'])
if outages:
                        self.my_local_logger.info('Examining circuit=%s, asset=%s, which has %d outages to post!' % (this_circuit, this_asset, len(outages['event_durations'])))
if this_asset[0:2] == "TR":
asset_type = "TRANSFORMER"
elif this_asset[0:2] == "HS":
asset_type = "HOUSE"
elif this_asset[0:2] == "PS":
asset_type = "POLE, SECONDARY"
elif this_asset[0:2] == "PP":
asset_type = "POLE, PRIMARY"
else:
asset_type = "OTHER"
for i, this_event_duration in enumerate(outages['event_durations']):
address_string = cell['assets'][this_asset]['serviceAddress']
self.my_local_logger.info("address_string = %s" % address_string)
address_string_pairs = json.loads(address_string)
this_address = ''
if "Municipality" in address_string_pairs.keys():
this_address += 'Municipality:' + address_string_pairs['Municipality'] + '|'
if "Address" in address_string_pairs.keys():
this_address += 'Address:' + address_string_pairs['Address'] + '|'
# Here's how to include the CE Map ID and the Utility ID if needed
# this_address += 'CE MapID:' + this_asset.split('_')[1] + '|'
# this_address += 'UtilityID:' + this_asset.split('_')[0][2:]
                        if this_address.endswith('|'):
this_address = this_address[:-1]
utility_document = {
"internalUtilityGuid": asset_item['guid'],
"eventDuration": int(round(this_event_duration * 1000)),
# "guid": "guid-here",
# "id": 'id-here',
"utility": {
"assetType": asset_type,
"circuitID": this_circuit,
"company": payload["company"],
"outageID": 'outage-id-here',
"transformerID": this_asset,
"address": this_address
},
"timeOfEvent": int(round(outages['event_times'][i] * 1000)),
# "longitude": asset_item['lat_lon'][1],
# "latitude": asset_item['lat_lon'][0],
"algorithm": "NEAR10"
# "version": 0
}
if not g_config.IS_DEPLOYED:
print "Posting a %10.2f minute outage on %s, circuit: %s, asset_id: %s" % (
(utility_document['eventDuration'] / 1000 / 60),
arrow.get(utility_document['timeOfEvent'] / 1000).format("MMMM DD, YYYY @ hh:mm A"),
utility_document['utility']['circuitID'],
utility_document['utility']['transformerID']
)
self.my_local_logger.info('Posting: %s' % json.dumps(utility_document))
self.eon_api_bridge.groomed_outages_post_20(utility_document)
else:
if not g_config.IS_DEPLOYED:
print "Nothing to post for circuit: %s, asset_id: %s" % (
this_circuit,
this_asset
)
        except Exception as e:
            self.my_local_logger.error('Posting outage error: %s' % e)
def build_in_memory_cell_db(self, cell):
"""
:param cell: A cell of data that represents the collection of onts, assets and circuits along with the alarms
Creates an in-memory data structure that has this information:
this_cell = {'neighbors': [], # the 6 nearest neighbors
'assets': {}, # The utility assets including their lat and lon and events
'onts': {}, # Verizon's ONTs including their lat and lon and events
'state': '' # A string representing the state of this cell.
                     This is used for multi-threading purposes so that neighboring cells can see
                     what's going on.
'circuits': {} # This is a set of circuits in this cell. All assets on a circuit
are in the circuits list
'lat_lon': [] # The lat and lon array of the center of the cell
'radius': 1.00 # The radius of the circumscribed cell.
ont_items is a dictionary of {'lat_lon':[],'assets':[],'events':[]}
asset_items is a dictionary of {'lat_lon':[],'onts':[],'events':[]}
:return: none
"""
asset_dict = {'groom_time': cell['groom_time']}
for this_asset in cell['assets']:
asset_dict[this_asset] = SortedDict()
for this_ont in cell['assets'][this_asset]['onts']:
this_distance = lat_lon_distance(cell['assets'][this_asset]['lat_lon'][0],
cell['assets'][this_asset]['lat_lon'][1],
cell['onts'][this_ont]['lat_lon'][0],
cell['onts'][this_ont]['lat_lon'][1])
for this_event in cell['onts'][this_ont]['events'][0]:
event_key = int(this_event / 1000)
if event_key in asset_dict[this_asset]:
asset_dict[this_asset][event_key]['voters'].update({this_distance: this_ont})
else:
voters = SortedDict()
voters.update({this_distance: this_ont})
                        # Inferred completion -- the source file was truncated here; this
                        # mirrors the 'voters' update branch above for a new event key.
                        asset_dict[this_asset][event_key] = {'voters': voters}
# Copyright Qwilt, 2012
#
# The code contained in this file may not be used by any other entities without explicit written permission from Qwilt.
#
# Author: naamas
from a.infra.misc.enum_with_value import EnumWithValue
from a.infra.basic.return_codes import ReturnCodes
from a.infra.misc.init_guard import InitGuard
from a.sys.confd.pyconfdlib.tag_values import TagValues
from a.sys.confd.pyconfdlib.value import Value
from a.sys.confd.pyconfdlib.key_path import KeyPath
from system_defaults_maapi_base_gen import SystemDefaultsMaapiBase
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.content.content_maapi_gen import BlinkyContentMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.connectivity_check.connectivity_check_maapi_gen import BlinkyConnectivityCheckMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.management.management_maapi_gen import BlinkyManagementMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.link.link_maapi_gen import BlinkyLinkMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.device.device_maapi_gen import BlinkyDeviceMaapi
class BlinkySystemDefaultsMaapi(SystemDefaultsMaapiBase):
def __init__ (self, logger):
self.myInitGuard = InitGuard()
        self._log = logger.createLogger("sys-blinky-oper-example", "blinky-maapi-systemDefaults")
self.domain = None
self.contentObj = None
self.connectivityCheckObj = None
self.managementObj = None
self.linkObj = None
self.deviceObj = None
self.configurationDelayRequested = False
self.configurationDelay = None
self.configurationDelaySet = False
self.muteReportingRequested = False
self.muteReporting = None
self.muteReportingSet = False
self.sendGratuitousArpRequested = False
self.sendGratuitousArp = None
self.sendGratuitousArpSet = False
self.shutdownRequested = False
self.shutdown = None
self.shutdownSet = False
self.techModeRequested = False
self.techMode = None
self.techModeSet = False
def init (self, domain):
self.myInitGuard.crashIfInitDone()
for logFunc in self._log('init').debug3Func(): logFunc('called. domain=%s', domain)
self.domain = domain
self.myInitGuard.initDone()
def requestConfigAndOper (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-config-and-oper').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(True)
self.requestMuteReporting(True)
self.requestSendGratuitousArp(True)
self.requestShutdown(True)
self.requestTechMode(True)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestConfigAndOper()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestConfigAndOper()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestConfigAndOper()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestConfigAndOper()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestConfigAndOper()
def requestConfig (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-config').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(True)
self.requestMuteReporting(True)
self.requestSendGratuitousArp(True)
self.requestShutdown(True)
self.requestTechMode(True)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestConfig()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestConfig()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestConfig()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestConfig()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestConfig()
def requestOper (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-oper').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(False)
self.requestMuteReporting(False)
self.requestSendGratuitousArp(False)
self.requestShutdown(False)
self.requestTechMode(False)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestOper()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestOper()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestOper()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestOper()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestOper()
def clearAllRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-requested').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(False)
self.requestMuteReporting(False)
self.requestSendGratuitousArp(False)
self.requestShutdown(False)
self.requestTechMode(False)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.clearAllRequested()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.clearAllRequested()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.clearAllRequested()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.clearAllRequested()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.clearAllRequested()
def clearAllSet (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-set').debug3Func(): logFunc('called, PARAMS')
self.setConfigurationDelay(None)
self.configurationDelaySet = False
self.setMuteReporting(None)
self.muteReportingSet = False
self.setSendGratuitousArp(None)
self.sendGratuitousArpSet = False
self.setShutdown(None)
self.shutdownSet = False
self.setTechMode(None)
self.techModeSet = False
if self.contentObj:
self.contentObj.clearAllSet()
if self.connectivityCheckObj:
self.connectivityCheckObj.clearAllSet()
if self.managementObj:
self.managementObj.clearAllSet()
if self.linkObj:
self.linkObj.clearAllSet()
if self.deviceObj:
self.deviceObj.clearAllSet()
def write (self
, interface
, trxContext=None
):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('write').debug3Func(): logFunc('called, PARAMS')
return self._internalWrite(interface, trxContext)
def read (self
, interface
, trxContext=None):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read').debug3Func(): logFunc('called, PARAMS')
return self._internalRead(interface,
False,
trxContext)
def readAllOrFail (self
, interface
, trxContext=None):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read-all-or-fail').debug3Func(): logFunc('called, PARAMS')
return self._internalRead(interface,
True,
trxContext)
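# Illustrative usage sketch (editorial comment; assumes a configured logger
# and MAAPI domain, and uses only methods defined in this class):
#
#   maapi = BlinkySystemDefaultsMaapi(logger)
#   maapi.init(domain)
#   maapi.requestShutdown(True)
#   maapi.requestMuteReporting(True)
#   if maapi.read("eth0") == ReturnCodes.kOk:
#       shutdown = maapi.getShutdown()
#       muted = maapi.getMuteReporting()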
def newContent (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-content').debug3Func(): logFunc('called.')
content = BlinkyContentMaapi(self._log)
content.init(self.domain)
return content
def setContentObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-content').debug3Func(): logFunc('called. obj=%s', obj)
self.contentObj = obj
def getContentObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-content').debug3Func(): logFunc('called. self.contentObj=%s', self.contentObj)
return self.contentObj
def hasContent (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-content').debug3Func(): logFunc('called. self.contentObj=%s', self.contentObj)
if self.contentObj:
return True
return False
def newConnectivityCheck (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-connectivitycheck').debug3Func(): logFunc('called.')
connectivityCheck = BlinkyConnectivityCheckMaapi(self._log)
connectivityCheck.init(self.domain)
return connectivityCheck
def setConnectivityCheckObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-connectivitycheck').debug3Func(): logFunc('called. obj=%s', obj)
self.connectivityCheckObj = obj
def getConnectivityCheckObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-connectivitycheck').debug3Func(): logFunc('called. self.connectivityCheckObj=%s', self.connectivityCheckObj)
return self.connectivityCheckObj
def hasConnectivityCheck (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-connectivitycheck').debug3Func(): logFunc('called. self.connectivityCheckObj=%s', self.connectivityCheckObj)
if self.connectivityCheckObj:
return True
return False
def newManagement (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-management').debug3Func(): logFunc('called.')
management = BlinkyManagementMaapi(self._log)
management.init(self.domain)
return management
def setManagementObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-management').debug3Func(): logFunc('called. obj=%s', obj)
self.managementObj = obj
def getManagementObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-management').debug3Func(): logFunc('called. self.managementObj=%s', self.managementObj)
return self.managementObj
def hasManagement (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-management').debug3Func(): logFunc('called. self.managementObj=%s', self.managementObj)
if self.managementObj:
return True
return False
def newLink (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-link').debug3Func(): logFunc('called.')
link = BlinkyLinkMaapi(self._log)
link.init(self.domain)
return link
def setLinkObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-link').debug3Func(): logFunc('called. obj=%s', obj)
self.linkObj = obj
def getLinkObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-link').debug3Func(): logFunc('called. self.linkObj=%s', self.linkObj)
return self.linkObj
def hasLink (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-link').debug3Func(): logFunc('called. self.linkObj=%s', self.linkObj)
if self.linkObj:
return True
return False
def newDevice (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-device').debug3Func(): logFunc('called.')
device = BlinkyDeviceMaapi(self._log)
device.init(self.domain)
return device
def setDeviceObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-device').debug3Func(): logFunc('called. obj=%s', obj)
self.deviceObj = obj
def getDeviceObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-device').debug3Func(): logFunc('called. self.deviceObj=%s', self.deviceObj)
return self.deviceObj
def hasDevice (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-device').debug3Func(): logFunc('called. self.deviceObj=%s', self.deviceObj)
if self.deviceObj:
return True
return False
def requestConfigurationDelay (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-configurationdelay').debug3Func(): logFunc('called. requested=%s', requested)
self.configurationDelayRequested = requested
self.configurationDelaySet = False
def isConfigurationDelayRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-configurationdelay-requested').debug3Func(): logFunc('called. requested=%s', self.configurationDelayRequested)
return self.configurationDelayRequested
def getConfigurationDelay (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-configurationdelay').debug3Func(): logFunc('called. self.configurationDelaySet=%s, self.configurationDelay=%s', self.configurationDelaySet, self.configurationDelay)
if self.configurationDelaySet:
return self.configurationDelay
return None
def hasConfigurationDelay (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-configurationdelay').debug3Func(): logFunc('called. self.configurationDelaySet=%s, self.configurationDelay=%s', self.configurationDelaySet, self.configurationDelay)
if self.configurationDelaySet:
return True
return False
def setConfigurationDelay (self, configurationDelay):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-configurationdelay').debug3Func(): logFunc('called. configurationDelay=%s, old=%s', configurationDelay, self.configurationDelay)
self.configurationDelaySet = True
self.configurationDelay = configurationDelay
def requestMuteReporting (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-mutereporting').debug3Func(): logFunc('called. requested=%s', requested)
self.muteReportingRequested = requested
self.muteReportingSet = False
def isMuteReportingRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-mutereporting-requested').debug3Func(): logFunc('called. requested=%s', self.muteReportingRequested)
return self.muteReportingRequested
def getMuteReporting (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-mutereporting').debug3Func(): logFunc('called. self.muteReportingSet=%s, self.muteReporting=%s', self.muteReportingSet, self.muteReporting)
if self.muteReportingSet:
return self.muteReporting
return None
def hasMuteReporting (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-mutereporting').debug3Func(): logFunc('called. self.muteReportingSet=%s, self.muteReporting=%s', self.muteReportingSet, self.muteReporting)
if self.muteReportingSet:
return True
return False
def setMuteReporting (self, muteReporting):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-mutereporting').debug3Func(): logFunc('called. muteReporting=%s, old=%s', muteReporting, self.muteReporting)
self.muteReportingSet = True
self.muteReporting = muteReporting
def requestSendGratuitousArp (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-sendgratuitousarp').debug3Func(): logFunc('called. requested=%s', requested)
self.sendGratuitousArpRequested = requested
self.sendGratuitousArpSet = False
def isSendGratuitousArpRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-sendgratuitousarp-requested').debug3Func(): logFunc('called. requested=%s', self.sendGratuitousArpRequested)
return self.sendGratuitousArpRequested
def getSendGratuitousArp (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-sendgratuitousarp').debug3Func(): logFunc('called. self.sendGratuitousArpSet=%s, self.sendGratuitousArp=%s', self.sendGratuitousArpSet, self.sendGratuitousArp)
if self.sendGratuitousArpSet:
return self.sendGratuitousArp
return None
def hasSendGratuitousArp (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-sendgratuitousarp').debug3Func(): logFunc('called. self.sendGratuitousArpSet=%s, self.sendGratuitousArp=%s', self.sendGratuitousArpSet, self.sendGratuitousArp)
if self.sendGratuitousArpSet:
return True
return False
def setSendGratuitousArp (self, sendGratuitousArp):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-sendgratuitousarp').debug3Func(): logFunc('called. sendGratuitousArp=%s, old=%s', sendGratuitousArp, self.sendGratuitousArp)
self.sendGratuitousArpSet = True
self.sendGratuitousArp = sendGratuitousArp
def requestShutdown (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-shutdown').debug3Func(): logFunc('called. requested=%s', requested)
self.shutdownRequested = requested
self.shutdownSet = False
def isShutdownRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-shutdown-requested').debug3Func(): logFunc('called. requested=%s', self.shutdownRequested)
return self.shutdownRequested
def getShutdown (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-shutdown').debug3Func(): logFunc('called. self.shutdownSet=%s, self.shutdown=%s', self.shutdownSet, self.shutdown)
if self.shutdownSet:
return self.shutdown
return None
def hasShutdown (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-shutdown').debug3Func(): logFunc('called. self.shutdownSet=%s, self.shutdown=%s', self.shutdownSet, self.shutdown)
if self.shutdownSet:
return True
return False
def setShutdown (self, shutdown):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-shutdown').debug3Func(): logFunc('called. shutdown=%s, old=%s', shutdown, self.shutdown)
self.shutdownSet = True
self.shutdown = shutdown
def requestTechMode (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-techmode').debug3Func(): logFunc('called. requested=%s', requested)
self.techModeRequested = requested
self.techModeSet = False
def isTechModeRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-techmode-requested').debug3Func(): logFunc('called. requested=%s', self.techModeRequested)
return self.techModeRequested
def getTechMode (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-techmode').debug3Func(): logFunc('called. self.techModeSet=%s, self.techMode=%s', self.techModeSet, self.techMode)
if self.techModeSet:
return self.techMode
return None
def hasTechMode (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-techmode').debug3Func(): logFunc('called. self.techModeSet=%s, self.techMode=%s', self.techModeSet, self.techMode)
if self.techModeSet:
return True
return False
def setTechMode (self, techMode):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-techmode').debug3Func(): logFunc('called. techMode=%s, old=%s', techMode, self.techMode)
self.techModeSet = True
self.techMode = techMode
def _clearAllReadData (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-read-data').debug3Func(): logFunc('called')
if self.contentObj:
self.contentObj._clearAllReadData()
if self.connectivityCheckObj:
self.connectivityCheckObj._clearAllReadData()
if self.managementObj:
self.managementObj._clearAllReadData()
if self.linkObj:
self.linkObj._clearAllReadData()
if self.deviceObj:
self.deviceObj._clearAllReadData()
self.configurationDelay = 0
self.configurationDelaySet = False
self.muteReporting = 0
self.muteReportingSet = False
self.sendGratuitousArp = 0
self.sendGratuitousArpSet = False
self.shutdown = 0
self.shutdownSet = False
self.techMode = 0
self.techModeSet = False
def _getSelfKeyPath (self, interface
, junkForTemplate):
for logFunc in self._log('get-self-key-path').debug3Func(): logFunc('called. PARAMS, junkForTemplate=%s', junkForTemplate)
keyPath = KeyPath()
xmlVal = Value()
xmlVal.setXmlTag(("system-defaults", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
ancestorVal = Value()
ancestorVal.setString(interface)
keyPath.addKeyPathPrefix(ancestorVal)
xmlVal = Value()
xmlVal.setXmlTag(("interface", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
xmlVal = Value()
xmlVal.setXmlTag(("interfaces", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
xmlVal = Value()
xmlVal.setXmlTag(("tech", "http://qwilt.com/ns/yang/device/tech/qwilt-tech", "qt"))
keyPath.addKeyPathPrefix(xmlVal)
for logFunc in self._log('get-self-key-path-done').debug3Func(): logFunc('done. keyPath=%s. PARAMS', keyPath)
return keyPath
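# Editorial note: given the prefixes pushed above (innermost first), the
# resulting key path corresponds to
#   /qt:tech/qt-if:interfaces/qt-if:interface{<interface>}/qt-if:system-defaults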
def _internalWrite (self,
interface,
trxContext):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('internal-write').debug3Func(): logFunc('called. PARAMS')
tagValueList = TagValues()
res = self._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('write-fill-write-tag-value-failed').errorFunc(): logFunc('_fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
itemsToDelete = []
res = self._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('write-collect-items-to-delete-failed').errorFunc(): logFunc('_collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
keyPath = self._getSelfKeyPath(interface,
None)
res = self.domain.writeMaapi(tagValueList, keyPath, trxContext, itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('write-domain-failed').errorFunc(): logFunc('domain.writeMaapi() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('internal-write-done').debug3Func(): logFunc('done. PARAMS')
return ReturnCodes.kOk
def _internalRead (self,
interface,
readAllOrFail,
trxContext):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('internal-read').debug3Func(): logFunc('called. PARAMS, readAllOrFail=%s', readAllOrFail)
if readAllOrFail:
self._clearAllReadData()
tagValueList = TagValues()
res = self._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('read-fill-read-tag-value-failed').errorFunc(): logFunc('_fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
keyPath = self._getSelfKeyPath(interface,
None)
res = self.domain.readMaapi(tagValueList, keyPath, trxContext)
if res!= ReturnCodes.kOk:
for logFunc in self._log('read-domain-failed').errorFunc(): logFunc('domain.readMaapi() failed. PARAMS')
return ReturnCodes.kGeneralError
res = self._readTagValues(tagValueList, readAllOrFail)
if res!= ReturnCodes.kOk:
for logFunc in self._log('read-read-tag-values-failed').errorFunc(): logFunc('_readTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('internal-read-done').debug3Func(): logFunc('done. PARAMS, readAllOrFail=%s', readAllOrFail)
return ReturnCodes.kOk
def _collectItemsToDelete (self,
interface,
itemsToDelete):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('collect-items-to-delete').debug3Func(): logFunc('called: itemsToDelete=%s. PARAMS', itemsToDelete)
if self.contentObj:
res = self.contentObj._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-content-failed').errorFunc(): logFunc('contentObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.connectivityCheckObj:
res = self.connectivityCheckObj._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.managementObj:
res = self.managementObj._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-management-failed').errorFunc(): logFunc('managementObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.linkObj:
res = self.linkObj._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-link-failed').errorFunc(): logFunc('linkObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.deviceObj:
res = self.deviceObj._collectItemsToDelete(interface,
itemsToDelete)
if res!= ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-device-failed').errorFunc(): logFunc('deviceObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('collect-items-to-delete-done').debug3Func(): logFunc('done: itemsToDelete=%s. PARAMS', itemsToDelete)
return ReturnCodes.kOk
def _fillWriteTagValues (self, tagValueList):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('fill-write-tag-values').debug3Func(): logFunc('called: tagValueList=%s', tagValueList)
if self.hasConfigurationDelay():
valConfigurationDelay = Value()
if self.configurationDelay is not None:
valConfigurationDelay.setUint64(self.configurationDelay)
else:
valConfigurationDelay.setEmpty()
tagValueList.push(("configuration-delay", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valConfigurationDelay)
if self.hasMuteReporting():
valMuteReporting = Value()
if self.muteReporting is not None:
valMuteReporting.setBool(self.muteReporting)
else:
valMuteReporting.setEmpty()
tagValueList.push(("mute-reporting", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valMuteReporting)
if self.hasSendGratuitousArp():
valSendGratuitousArp = Value()
if self.sendGratuitousArp is not None:
valSendGratuitousArp.setBool(self.sendGratuitousArp)
else:
valSendGratuitousArp.setEmpty()
tagValueList.push(("send-gratuitous-arp", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valSendGratuitousArp)
if self.hasShutdown():
valShutdown = Value()
if self.shutdown is not None:
valShutdown.setBool(self.shutdown)
else:
valShutdown.setEmpty()
tagValueList.push(("shutdown", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valShutdown)
if self.hasTechMode():
valTechMode = Value()
if self.techMode is not None:
valTechMode.setBool(self.techMode)
else:
valTechMode.setEmpty()
tagValueList.push(("tech-mode", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valTechMode)
if self.contentObj:
valBegin = Value()
(tag, ns, prefix) = ("content", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.contentObj._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-content-failed').errorFunc(): logFunc('contentObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to write it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.connectivityCheckObj:
valBegin = Value()
(tag, ns, prefix) = ("connectivity-check", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.connectivityCheckObj._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to write it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.managementObj:
valBegin = Value()
(tag, ns, prefix) = ("management", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.managementObj._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-management-failed').errorFunc(): logFunc('managementObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to write it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.linkObj:
valBegin = Value()
(tag, ns, prefix) = ("link", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.linkObj._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-link-failed').errorFunc(): logFunc('linkObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to write it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.deviceObj:
valBegin = Value()
(tag, ns, prefix) = ("device", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.deviceObj._fillWriteTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-device-failed').errorFunc(): logFunc('deviceObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to write it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
return ReturnCodes.kOk
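# Editorial note: each descendant container above is bracketed by an
# XmlBegin/XmlEnd pair; if the child adds no tag-values in between, the begin
# marker is popped so that empty containers are never sent to MAAPI.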
def _fillReadTagValues (self, tagValueList):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('fill-read-tag-values').debug3Func(): logFunc('called: tagValueList=%s', tagValueList)
if self.isConfigurationDelayRequested():
valConfigurationDelay = Value()
valConfigurationDelay.setEmpty()
tagValueList.push(("configuration-delay", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valConfigurationDelay)
if self.isMuteReportingRequested():
valMuteReporting = Value()
valMuteReporting.setEmpty()
tagValueList.push(("mute-reporting", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valMuteReporting)
if self.isSendGratuitousArpRequested():
valSendGratuitousArp = Value()
valSendGratuitousArp.setEmpty()
tagValueList.push(("send-gratuitous-arp", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valSendGratuitousArp)
if self.isShutdownRequested():
valShutdown = Value()
valShutdown.setEmpty()
tagValueList.push(("shutdown", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valShutdown)
if self.isTechModeRequested():
valTechMode = Value()
valTechMode.setEmpty()
tagValueList.push(("tech-mode", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valTechMode)
if self.contentObj:
valBegin = Value()
(tag, ns, prefix) = ("content", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.contentObj._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-content-failed').errorFunc(): logFunc('contentObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.connectivityCheckObj:
valBegin = Value()
(tag, ns, prefix) = ("connectivity-check", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.connectivityCheckObj._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.managementObj:
valBegin = Value()
(tag, ns, prefix) = ("management", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.managementObj._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-management-failed').errorFunc(): logFunc('managementObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.linkObj:
valBegin = Value()
(tag, ns, prefix) = ("link", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.linkObj._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-link-failed').errorFunc(): logFunc('linkObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.deviceObj:
valBegin = Value()
(tag, ns, prefix) = ("device", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.deviceObj._fillReadTagValues(tagValueList)
if res!= ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-device-failed').errorFunc(): logFunc('deviceObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
return ReturnCodes.kOk
def _readTagValues (self, tagValueList, readAllOrFail):
__pychecker__ = 'maxlines=300 maxreturns=30'
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read-tag-values').debug3Func(): logFunc('called. readAllOrFail=%s, tagValueList=%s', readAllOrFail, tagValueList)
res = ReturnCodes.kOk
for logFunc in self._log('read-tag-values-leaves').debug3Func(): logFunc('reading leaves. PARAMS')
|
9e83190ba101ff865ded4f3fba7ad806dcc6226b
|
Python
|
6691161,兵庫県,神戸市北区,道場町生野
6600000,兵庫県,尼崎市,
6610981,兵庫県,尼崎市,猪名寺
6600064,兵庫県,尼崎市,稲葉荘
6600055,兵庫県,尼崎市,稲葉元町
6600812,兵庫県,尼崎市,今福
6600096,兵庫県,尼崎市,扇町
6600076,兵庫県,尼崎市,大島
6600072,兵庫県,尼崎市,大庄川田町
6600075,兵庫県,尼崎市,大庄中通
6600077,兵庫県,尼崎市,大庄西町
6600063,兵庫県,尼崎市,大庄北
6600842,兵庫県,尼崎市,大高洲町
6610023,兵庫県,尼崎市,大西町
6600095,兵庫県,尼崎市,大浜町
6610022,兵庫県,尼崎市,尾浜町
6600862,兵庫県,尼崎市,開明町
6600821,兵庫県,尼崎市,梶ケ島
6610979,兵庫県,尼崎市,上坂部
6610014,兵庫県,尼崎市,上ノ島町
6610971,兵庫県,尼崎市,瓦宮
6610964,兵庫県,尼崎市,神崎町
6600884,兵庫県,尼崎市,神田中通
6600885,兵庫県,尼崎市,神田南通
6600883,兵庫県,尼崎市,神田北通
6600826,兵庫県,尼崎市,北城内
6600878,兵庫県,尼崎市,北竹谷町
6600804,兵庫県,尼崎市,北大物町
6600834,兵庫県,尼崎市,北初島町
6600806,兵庫県,尼崎市,金楽寺町
6600813,兵庫県,尼崎市,杭瀬寺島
6600814,兵庫県,尼崎市,杭瀬本町
6600822,兵庫県,尼崎市,杭瀬南新町
6600815,兵庫県,尼崎市,杭瀬北新町
6610977,兵庫県,尼崎市,久々知
6610978,兵庫県,尼崎市,久々知西町
6610983,兵庫県,尼崎市,口田中
6610013,兵庫県,尼崎市,栗山町
6600873,兵庫県,尼崎市,玄番南之町
6600872,兵庫県,尼崎市,玄番北之町
6610982,兵庫県,尼崎市,食満
6600074,兵庫県,尼崎市,琴浦町
6610972,兵庫県,尼崎市,小中島
6610024,兵庫県,尼崎市,三反田町
6600808,兵庫県,尼崎市,潮江
6610976,兵庫県,尼崎市,潮江
6600866,兵庫県,尼崎市,汐町
6610952,兵庫県,尼崎市,椎堂
6610975,兵庫県,尼崎市,下坂部
6600811,兵庫県,尼崎市,常光寺
6600881,兵庫県,尼崎市,昭和通
6600882,兵庫県,尼崎市,昭和南通
6600082,兵庫県,尼崎市,水明町
6600094,兵庫県,尼崎市,末広町
6600071,兵庫県,尼崎市,崇徳院
6610973,兵庫県,尼崎市,善法寺町
6600823,兵庫県,尼崎市,大物町
6610963,兵庫県,尼崎市,高田町
6600876,兵庫県,尼崎市,竹谷町
6610025,兵庫県,尼崎市,立花町
6600871,兵庫県,尼崎市,建家町
6610951,兵庫県,尼崎市,田能
6610002,兵庫県,尼崎市,塚口町
6610001,兵庫県,尼崎市,塚口本町
6600858,兵庫県,尼崎市,築地
6610965,兵庫県,尼崎市,次屋
6610046,兵庫県,尼崎市,常松
6610042,兵庫県,尼崎市,常吉
6600092,兵庫県,尼崎市,鶴町
6600867,兵庫県,尼崎市,寺町
6600083,兵庫県,尼崎市,道意町
6610961,兵庫県,尼崎市,戸ノ内町
6610003,兵庫県,尼崎市,富松町
6600851,兵庫県,尼崎市,中在家町
6600091,兵庫県,尼崎市,中浜町
6600802,兵庫県,尼崎市,長洲中通
6600801,兵庫県,尼崎市,長洲東通
6600807,兵庫県,尼崎市,長洲西通
6600803,兵庫県,尼崎市,長洲本通
6600073,兵庫県,尼崎市,菜切山町
6610974,兵庫県,尼崎市,若王寺
6600052,兵庫県,尼崎市,七松町
6600093,兵庫県,尼崎市,西海岸町
6610966,兵庫県,尼崎市,西川
6610047,兵庫県,尼崎市,西昆陽
6600865,兵庫県,尼崎市,西桜木町
6600845,兵庫県,尼崎市,西高洲町
6600054,兵庫県,尼崎市,西立花町
6600827,兵庫県,尼崎市,西大物町
6600805,兵庫県,尼崎市,西長洲町
6600893,兵庫県,尼崎市,西難波町
6600874,兵庫県,尼崎市,西本町
6600863,兵庫県,尼崎市,西本町北通
6600837,兵庫県,尼崎市,西松島町
6600868,兵庫県,尼崎市,西御園町
6600857,兵庫県,尼崎市,西向島町
6610962,兵庫県,尼崎市,額田町
6610967,兵庫県,尼崎市,浜
6600062,兵庫県,尼崎市,浜田町
6600843,兵庫県,尼崎市,東海岸町
6600864,兵庫県,尼崎市,東桜木町
6610953,兵庫県,尼崎市,東園田町
6600841,兵庫県,尼崎市,東高洲町
6600828,兵庫県,尼崎市,東大物町
6610011,兵庫県,尼崎市,東塚口町
6600051,兵庫県,尼崎市,東七松町
6600892,兵庫県,尼崎市,東難波町
6600832,兵庫県,尼崎市,東初島町
6600844,兵庫県,尼崎市,東浜町
6600824,兵庫県,尼崎市,東本町
6600831,兵庫県,尼崎市,東松島町
6600835,兵庫県,尼崎市,東向島東之町
6600856,兵庫県,尼崎市,東向島西之町
6600891,兵庫県,尼崎市,扶桑町
6600846,兵庫県,尼崎市,船出
6600087,兵庫県,尼崎市,平左衛門町
6600086,兵庫県,尼崎市,丸島町
6610026,兵庫県,尼崎市,水堂町
6610984,兵庫県,尼崎市,御園
6600861,兵庫県,尼崎市,御園町
6610985,兵庫県,尼崎市,南清水
6600825,兵庫県,尼崎市,南城内
6600875,兵庫県,尼崎市,南竹谷町
6610012,兵庫県,尼崎市,南塚口町
6600053,兵庫県,尼崎市,南七松町
6600833,兵庫県,尼崎市,南初島町
6610033,兵庫県,尼崎市,南武庫之荘
6600877,兵庫県,尼崎市,宮内町
6600084,兵庫県,尼崎市,武庫川町
6610044,兵庫県,尼崎市,武庫町
6610041,兵庫県,尼崎市,武庫の里
6610035,兵庫県,尼崎市,武庫之荘
6610031,兵庫県,尼崎市,武庫之荘本町
6610032,兵庫県,尼崎市,武庫之荘東
6610034,兵庫県,尼崎市,武庫之荘西
6610043,兵庫県,尼崎市,武庫元町
6610045,兵庫県,尼崎市,武庫豊町
6610021,兵庫県,尼崎市,名神町
6600085,兵庫県,尼崎市,元浜町
6610970,兵庫県,尼崎市,弥生ケ丘町
6600061,兵庫県,尼崎市,蓬川荘園
6600081,兵庫県,尼崎市,蓬川町
6620000,兵庫県,西宮市,
6620063,兵庫県,西宮市,相生町
6620862,兵庫県,西宮市,青木町
6691122,兵庫県,西宮市,青葉台
6638186,兵庫県,西宮市,上鳴尾町
6620925,兵庫県,西宮市,朝凪町
6620842,兵庫県,西宮市,芦原町
6620871,兵庫県,西宮市,愛宕山
6620946,兵庫県,西宮市,荒戎町
6638025,兵庫県,西宮市,荒木町
6620911,兵庫県,西宮市,池田町
6638137,兵庫県,西宮市,池開町
6620928,兵庫県,西宮市,石在町
6620074,兵庫県,西宮市,石刎町
6620932,兵庫県,西宮市,泉町
6620873,兵庫県,西宮市,一ケ谷町
6620975,兵庫県,西宮市,市庭町
6638002,兵庫県,西宮市,一里山町
6620972,兵庫県,西宮市,今在家町
6638214,兵庫県,西宮市,今津曙町
6638213,兵庫県,西宮市,今津上野町
6638221,兵庫県,西宮市,今津大東町
6638222,兵庫県,西宮市,今津久寿川町
6638229,兵庫県,西宮市,今津社前町
6638223,兵庫県,西宮市,今津巽町
6638227,兵庫県,西宮市,今津出在家町
6638225,兵庫県,西宮市,今津西浜町
6638212,兵庫県,西宮市,今津野田町
6638228,兵庫県,西宮市,今津二葉町
6638224,兵庫県,西宮市,今津真砂町
6638215,兵庫県,西宮市,今津水波町
6638226,兵庫県,西宮市,今津港町
6638211,兵庫県,西宮市,今津山中町
6620886,兵庫県,西宮市,上ケ原山田町
6620885,兵庫県,西宮市,上ケ原山手町
6620891,兵庫県,西宮市,上ケ原一番町
6620892,兵庫県,西宮市,上ケ原二番町
6620893,兵庫県,西宮市,上ケ原三番町
6620894,兵庫県,西宮市,上ケ原四番町
6620895,兵庫県,西宮市,上ケ原五番町
6620896,兵庫県,西宮市,上ケ原六番町
6620881,兵庫県,西宮市,上ケ原七番町
6620882,兵庫県,西宮市,上ケ原八番町
6620883,兵庫県,西宮市,上ケ原九番町
6620884,兵庫県,西宮市,上ケ原十番町
6638134,兵庫県,西宮市,上田中町
6638133,兵庫県,西宮市,上田東町
6638135,兵庫県,西宮市,上田西町
6620855,兵庫県,西宮市,江上町
6638143,兵庫県,西宮市,枝川町
6620085,兵庫県,西宮市,老松町
6620036,兵庫県,西宮市,大井手町
6638017,兵庫県,西宮市,大島町
6620054,兵庫県,西宮市,大谷町
6620836,兵庫県,西宮市,大畑町
6620957,兵庫県,西宮市,大浜町
6638023,兵庫県,西宮市,大森町
6638106,兵庫県,西宮市,大屋町
6620827,兵庫県,西宮市,岡田山
6620022,兵庫県,西宮市,奥畑
6620961,兵庫県,西宮市,御茶家所町
6638182,兵庫県,西宮市,学文殿町
6620977,兵庫県,西宮市,神楽町
6638136,兵庫県,西宮市,笠屋町
6620052,兵庫県,西宮市,霞町
6620001,兵庫県,西宮市,甲山町
6638003,兵庫県,西宮市,上大市
6620865,兵庫県,西宮市,神垣町
6638114,兵庫県,西宮市,上甲子園
6620813,兵庫県,西宮市,上甲東園
6620027,兵庫県,西宮市,神園町
6638021,兵庫県,西宮市,上之町
6620954,兵庫県,西宮市,上葭原町
6620097,兵庫県,西宮市,柏堂町
6620098,兵庫県,西宮市,柏堂西町
6620944,兵庫県,西宮市,川添町
6620951,兵庫県,西宮市,川西町
6620861,兵庫県,西宮市,河原町
6620945,兵庫県,西宮市,川東町
6638107,兵庫県,西宮市,瓦林町
6620823,兵庫県,西宮市,神呪町
6620021,兵庫県,西宮市,神原
6620078,兵庫県,西宮市,菊谷町
6620062,兵庫県,西宮市,木津山町
6638035,兵庫県,西宮市,北口町
6620833,兵庫県,西宮市,北昭和町
6620025,兵庫県,西宮市,北名次町
6620091,兵庫県,西宮市,北山町
6691131,兵庫県,西宮市,清瀬台
6620077,兵庫県,西宮市,久出ケ谷町
6691135,兵庫県,西宮市,国見台
6620927,兵庫県,西宮市,久保町
6638103,兵庫県,西宮市,熊野町
6620064,兵庫県,西宮市,雲井町
6620926,兵庫県,西宮市,鞍掛町
6620083,兵庫県,西宮市,苦楽園一番町
6620082,兵庫県,西宮市,苦楽園二番町
6620081,兵庫県,西宮市,苦楽園三番町
6620088,兵庫県,西宮市,苦楽園四番町
6620087,兵庫県,西宮市,苦楽園五番町
6620086,兵庫県,西宮市,苦楽園六番町
6620037,兵庫県,西宮市,結善町
6620099,兵庫県,西宮市,剣谷町
6638156,兵庫県,西宮市,甲子園網引町
6638165,兵庫県,西宮市,甲子園浦風町
6638151,兵庫県,西宮市,甲子園洲鳥町
6638162,兵庫県,西宮市,甲子園砂田町
6638166,兵庫県,西宮市,甲子園高潮町
6638167,兵庫県,西宮市,甲子園浜田町
6638161,兵庫県,西宮市,甲子園春風町
6638163,兵庫県,西宮市,甲子園三保町
6638164,兵庫県,西宮市,甲子園六石町
6638171,兵庫県,西宮市,甲子園一番町
6638172,兵庫県,西宮市,甲子園二番町
6638173,兵庫県,西宮市,甲子園三番町
6638174,兵庫県,西宮市,甲子園四番町
6638175,兵庫県,西宮市,甲子園五番町
6638176,兵庫県,西宮市,甲子園六番町
6638177,兵庫県,西宮市,甲子園七番町
6638178,兵庫県,西宮市,甲子園八番町
6638179,兵庫県,西宮市,甲子園九番町
6638113,兵庫県,西宮市,甲子園口
6638112,兵庫県,西宮市,甲子園口北町
6638152,兵庫県,西宮市,甲子園町
6638155,兵庫県,西宮市,甲子園浜
6620812,兵庫県,西宮市,甲東園
6620832,兵庫県,西宮市,甲風園
6620965,兵庫県,西宮市,郷免町
6620018,兵庫県,西宮市,甲陽園山王町
6620012,兵庫県,西宮市,甲陽園東山町
6620017,兵庫県,西宮市,甲陽園西山町
6620014,兵庫県,西宮市,甲陽園日之出町
6620015,兵庫県,西宮市,甲陽園本庄町
6620011,兵庫県,西宮市,甲陽園目神山町
6620016,兵庫県,西宮市,甲陽園若江町
6620092,兵庫県,西宮市,甑岩町
6620006,兵庫県,西宮市,越水社家郷山
6620864,兵庫県,西宮市,越水町
6638122,兵庫県,西宮市,小曽根町
6620047,兵庫県,西宮市,寿町
6638123,兵庫県,西宮市,小松東町
6638125,兵庫県,西宮市,小松西町
6638124,兵庫県,西宮市,小松南町
6638126,兵庫県,西宮市,小松北町
6638127,兵庫県,西宮市,小松町
6620844,兵庫県,西宮市,西福町
6620032,兵庫県,西宮市,桜谷町
6620071,兵庫県,西宮市,桜町
6620875,兵庫県,西宮市,五月ケ丘
6638183,兵庫県,西宮市,里中町
6620978,兵庫県,西宮市,産所町
6691101,兵庫県,西宮市,塩瀬町生瀬
6691251,兵庫県,西宮市,塩瀬町名塩
6691141,兵庫県,西宮市,塩瀬町名塩
6620026,兵庫県,西宮市,獅子ケ口町
6620033,兵庫県,西宮市,清水町
6638004,兵庫県,西宮市,下大市東町
6638005,兵庫県,西宮市,下大市西町
6620956,兵庫県,西宮市,下葭原町
6620974,兵庫県,西宮市,社家町
6620004,兵庫県,西宮市,鷲林寺
6620003,兵庫県,西宮市,鷲林寺町
6620002,兵庫県,西宮市,鷲林寺南町
6620856,兵庫県,西宮市,城ケ堀町
6620822,兵庫県,西宮市,松籟荘
6620023,兵庫県,西宮市,城山
6620843,兵庫県,西宮市,神祇官町
6620013,兵庫県,西宮市,新甲陽町
6620845,兵庫県,西宮市,神明町
6620041,兵庫県,西宮市,末広町
6620096,兵庫県,西宮市,角石町
6620913,兵庫県,西宮市,染殿町
6620867,兵庫県,西宮市,大社町
6638033,兵庫県,西宮市,高木東町
6638032,兵庫県,西宮市,高木西町
6620872,兵庫県,西宮市,高座町
6638141,兵庫県,西宮市,高須町
6620066,兵庫県,西宮市,高塚町
6638202,兵庫県,西宮市,高畑町
6638204,兵庫県,西宮市,高松町
6638201,兵庫県,西宮市,田代町
6638001,兵庫県,西宮市,田近野町
6620943,兵庫県,西宮市,建石町
6620973,兵庫県,西宮市,田中町
6638006,兵庫県,西宮市,段上町
6620046,兵庫県,西宮市,千歳町
6620853,兵庫県,西宮市,津田町
6638012,兵庫県,西宮市,堤町
6638244,兵庫県,西宮市,津門綾羽町
6638242,兵庫県,西宮市,津門飯田町
6638247,兵庫県,西宮市,津門稲荷町
6638243,兵庫県,西宮市,津門大箇町
6638241,兵庫県,西宮市,津門大塚町
6638245,兵庫県,西宮市,津門呉羽町
6638234,兵庫県,西宮市,津門住江町
6638231,兵庫県,西宮市,津門西口町
6638246,兵庫県,西宮市,津門仁辺町
6638232,兵庫県,西宮市,津門宝津町
6638233,兵庫県,西宮市,津門川町
6638104,兵庫県,西宮市,天道町
6620043,兵庫県,西宮市,常磐町
6638121,兵庫県,西宮市,戸崎町
6620916,兵庫県,西宮市,戸田町
6620065,兵庫県,西宮市,殿山町
6638105,兵庫県,西宮市,中島町
6620851,兵庫県,西宮市,中須佐町
6620852,兵庫県,西宮市,中殿町
6620952,兵庫県,西宮市,中浜町
6620857,兵庫県,西宮市,中前田町
6620868,兵庫県,西宮市,中屋町
6620955,兵庫県,西宮市,中葭原町
6638034,兵庫県,西宮市,長田町
6691147,兵庫県,西宮市,名塩
6691149,兵庫県,西宮市,名塩赤坂
6691143,兵庫県,西宮市,名塩ガーデン
6691136,兵庫県,西宮市,名塩木之元
6691146,兵庫県,西宮市,名塩さくら台
6691142,兵庫県,西宮市,名塩山荘
6691144,兵庫県,西宮市,名塩茶園町
6691148,兵庫県,西宮市,名塩東久保
6691145,兵庫県,西宮市,名塩平成台
6691132,兵庫県,西宮市,名塩南台
6691162,兵庫県,西宮市,名塩美山
6691134,兵庫県,西宮市,名塩新町
6620024,兵庫県,西宮市,名次町
6691103,兵庫県,西宮市,生瀬東町
6691111,兵庫県,西宮市,生瀬高台
6691102,兵庫県,西宮市,生瀬町
6691104,兵庫県,西宮市,生瀬武庫川町
6638184,兵庫県,西宮市,鳴尾町
6638142,兵庫県,西宮市,鳴尾浜
6620038,兵庫県,西宮市,南郷町
6620814,兵庫県,西宮市,仁川五ケ山町
6620815,兵庫県,西宮市,仁川百合野町
6620811,兵庫県,西宮市,仁川町
6620034,兵庫県,西宮市,西田町
6620934,兵庫県,西宮市,西宮浜
6620933,兵庫県,西宮市,西波止町
6620093,兵庫県,西宮市,西平町
6620838,兵庫県,西宮市,能登町
6638015,兵庫県,西宮市,野間町
6620051,兵庫県,西宮市,羽衣町
6620854,兵庫県,西宮市,櫨塚町
6638187,兵庫県,西宮市,花園町
6691121,兵庫県,西宮市,花の峯
6620915,兵庫県,西宮市,馬場町
6638154,兵庫県,西宮市,浜甲子園
6620942,兵庫県,西宮市,浜町
6620923,兵庫県,西宮市,浜松原町
6620941,兵庫県,西宮市,浜脇町
6638014,兵庫県,西宮市,林田町
6638132,兵庫県,西宮市,東鳴尾町
6620924,兵庫県,西宮市,東浜町
6620922,兵庫県,西宮市,東町
6691133,兵庫県,西宮市,東山台
6620094,兵庫県,西宮市,毘沙門町
6620084,兵庫県,西宮市,樋之池町
6638011,兵庫県,西宮市,樋ノ口町
6638022,兵庫県,西宮市,日野町
6620835,兵庫県,西宮市,平木町
6620044,兵庫県,西宮市,平松町
6620837,兵庫県,西宮市,広田町
6638203,兵庫県,西宮市,深津町
6620067,兵庫県,西宮市,深谷町
6638031,兵庫県,西宮市,伏原町
6638111,兵庫県,西宮市,二見町
6638185,兵庫県,西宮市,古川町
6620042,兵庫県,西宮市,分銅町
6691112,兵庫県,西宮市,宝生ケ丘
6620072,兵庫県,西宮市,豊楽町
6620953,兵庫県,西宮市,堀切町
6620914,兵庫県,西宮市,本町
6620931,兵庫県,西宮市,前浜町
6620076,兵庫県,西宮市,松生町
6620073,兵庫県,西宮市,松風町
6620061,兵庫県,西宮市,松ケ丘町
6620962,兵庫県,西宮市,松下町
6620053,兵庫県,西宮市,松園町
6638102,兵庫県,西宮市,松並町
6620912,兵庫県,西宮市,松原町
6638101,兵庫県,西宮市,松山町
6620831,兵庫県,西宮市,丸橋町
6620031,兵庫県,西宮市,満池谷町
6620095,兵庫県,西宮市,美作町
6638153,兵庫県,西宮市,南甲子園
6620075,兵庫県,西宮市,南越木岩町
6620834,兵庫県,西宮市,南昭和町
6620976,兵庫県,西宮市,宮西町
6620947,兵庫県,西宮市,宮前町
6638131,兵庫県,西宮市,武庫川町
6620863,兵庫県,西宮市,室川町
6620846,兵庫県,西宮市,森下町
6638013,兵庫県,西宮市,門前町
6620826,兵庫県,西宮市,門戸岡田町
6620824,兵庫県,西宮市,門戸東町
6620828,兵庫県,西宮市,門戸西町
6620825,兵庫県,西宮市,門戸荘
6638024,兵庫県,西宮市,薬師町
6620963,兵庫県,西宮市,屋敷町
6620045,兵庫県,西宮市,安井町
6620866,兵庫県,西宮市,柳本町
6620005,兵庫県,西宮市,湯元町
6620964,兵庫県,西宮市,弓場町
6620921,兵庫県,西宮市,用海町
6620917,兵庫県,西宮市,与古道町
6620841,兵庫県,西宮市,両度町
6620918,兵庫県,西宮市,六湛寺町
6620874,兵庫県,西宮市,六軒町
6638181,兵庫県,西宮市,若草町
6620035,兵庫県,西宮市,若松町
6638016,兵庫県,西宮市,若山町
6620971,兵庫県,西宮市,和上町
6640000,兵庫県,伊丹市,
6640001,兵庫県,伊丹市,荒牧
6640008,兵庫県,伊丹市,荒牧南
6640864,兵庫県,伊丹市,安堂寺町
6640027,兵庫県,伊丹市,池尻
6640846,兵庫県,伊丹市,伊丹
6640861,兵庫県,伊丹市,稲野町
6640011,兵庫県,伊丹市,鋳物師
6640843,兵庫県,伊丹市,岩屋
6640856,兵庫県,伊丹市,梅ノ木
6640899,兵庫県,伊丹市,大鹿
6640003,兵庫県,伊丹市,大野
6640002,兵庫県,伊丹市,荻野
6640031,兵庫県,伊丹市,荻野西
6640025,兵庫県,伊丹市,奥畑
6640833,兵庫県,伊丹市,小阪田
6640863,兵庫県,伊丹市,柏木町
6640893,兵庫県,伊丹市,春日丘
6640831,兵庫県,伊丹市,北伊丹
6640837,兵庫県,伊丹市,北河原
6640891,兵庫県,伊丹市,北園
6640007,兵庫県,伊丹市,北野
6640836,兵庫県,伊丹市,北本町
6640857,兵庫県,伊丹市,行基町
6640844,兵庫県,伊丹市,口酒井
6640872,兵庫県,伊丹市,車塚
6640839,兵庫県,伊丹市,桑津
6640006,兵庫県,伊丹市,鴻池
6640855,兵庫県,伊丹市,御願塚
6640881,兵庫県,伊丹市,昆陽
6640015,兵庫県,伊丹市,昆陽池
6640885,兵庫県,伊丹市,昆陽泉町
6640016,兵庫県,伊丹市,昆陽北
6640886,兵庫県,伊丹市,昆陽東
6640888,兵庫県,伊丹市,昆陽南
6640897,兵庫県,伊丹市,桜ケ丘
6640894,兵庫県,伊丹市,清水
6640832,兵庫県,伊丹市,下河原
6640882,兵庫県,伊丹市,鈴原町
6640898,兵庫県,伊丹市,千僧
6640892,兵庫県,伊丹市,高台
6640851,兵庫県,伊丹市,中央
6640026,兵庫県,伊丹市,寺本
6640020,兵庫県,伊丹市,寺本東
6640022,兵庫県,伊丹市,中野東
6640023,兵庫県,伊丹市,中野西
6640029,兵庫県,伊丹市,中野北
6640838,兵庫県,伊丹市,中村
6640834,兵庫県,伊丹市,西桑津
6640858,兵庫県,伊丹市,西台
6640028,兵庫県,伊丹市,西野
6640873,兵庫県,伊丹市,野間
6640875,兵庫県,伊丹市,野間北
6640845,兵庫県,伊丹市,東有岡
6640835,兵庫県,伊丹市,東桑津
6640004,兵庫県,伊丹市,東野
6640853,兵庫県,伊丹市,平松
6640014,兵庫県,伊丹市,広畑
6640847,兵庫県,伊丹市,藤ノ木
6640896,兵庫県,伊丹市,船原
6640871,兵庫県,伊丹市,堀池
6640024,兵庫県,伊丹市,松ケ丘
6640884,兵庫県,伊丹市,美鈴町
6640017,兵庫県,伊丹市,瑞ケ丘
6640005,兵庫県,伊丹市,瑞原
6640013,兵庫県,伊丹市,瑞穂町
6640012,兵庫県,伊丹市,緑ケ丘
6640883,兵庫県,伊丹市,南鈴原
6640854,兵庫県,伊丹市,南町
6640865,兵庫県,伊丹市,南野
6640887,兵庫県,伊丹市,南野北
6640852,兵庫県,伊丹市,南本町
6640895,兵庫県,伊丹市,宮ノ前
6640842,兵庫県,伊丹市,森本
6640874,兵庫県,伊丹市,山田
6640862,兵庫県,伊丹市,若菱町
6680000,兵庫県,豊岡市,
6680801,兵庫県,豊岡市,赤石
6680001,兵庫県,豊岡市,伊賀谷
6680261,兵庫県,豊岡市,出石町荒木
6680213,兵庫県,豊岡市,出石町伊木
6680207,兵庫県,豊岡市,出石町伊豆
6680218,兵庫県,豊岡市,出石町入佐
6680244,兵庫県,豊岡市,出石町上野
6680215,兵庫県,豊岡市,出石町魚屋
6680214,兵庫県,豊岡市,出石町内町
6680271,兵庫県,豊岡市,出石町大谷
6680201,兵庫県,豊岡市,出石町奥小野
6680251,兵庫県,豊岡市,出石町奥山
6680235,兵庫県,豊岡市,出石町鍛冶屋
6680279,兵庫県,豊岡市,出石町片間
6680255,兵庫県,豊岡市,出石町上村
6680231,兵庫県,豊岡市,出石町川原
6680242,兵庫県,豊岡市,出石町桐野
6680202,兵庫県,豊岡市,出石町口小野
6680264,兵庫県,豊岡市,出石町暮坂
6680256,兵庫県,豊岡市,出石町小人
6680216,兵庫県,豊岡市,出石町材木
6680205,兵庫県,豊岡市,出石町嶋
6680211,兵庫県,豊岡市,出石町下谷
6680233,兵庫県,豊岡市,出石町田結庄
6680209,兵庫県,豊岡市,出石町田多地
6680212,兵庫県,豊岡市,出石町谷山
6680273,兵庫県,豊岡市,出石町坪井
6680257,兵庫県,豊岡市,出石町坪口
6680223,兵庫県,豊岡市,出石町鉄砲
6680241,兵庫県,豊岡市,出石町寺坂
6680222,兵庫県,豊岡市,出石町寺町
6680217,兵庫県,豊岡市,出石町東條
6680272,兵庫県,豊岡市,出石町鳥居
6680275,兵庫県,豊岡市,出石町長砂
6680243,兵庫県,豊岡市,出石町中野
6680254,兵庫県,豊岡市,出石町中村
6680203,兵庫県,豊岡市,出石町袴狭
6680237,兵庫県,豊岡市,出石町馬場
6680246,兵庫県,豊岡市,出石町日野辺
6680266,兵庫県,豊岡市,出石町平田
6680238,兵庫県,豊岡市,出石町弘原
6680206,兵庫県,豊岡市,出石町福居
6680263,兵庫県,豊岡市,出石町福住
6680265,兵庫県,豊岡市,出石町福見
6680262,兵庫県,豊岡市,出石町細見
6680224,兵庫県,豊岡市,出石町本町
6680221,兵庫県,豊岡市,出石町町分
6680236,兵庫県,豊岡市,出石町松枝
6680277,兵庫県,豊岡市,出石町丸中
6680278,兵庫県,豊岡市,出石町三木
6680204,兵庫県,豊岡市,出石町宮内
6680274,兵庫県,豊岡市,出石町水上
6680276,兵庫県,豊岡市,出石町森井
6680225,兵庫県,豊岡市,出石町八木
6680208,兵庫県,豊岡市,出石町安良
6680234,兵庫県,豊岡市,出石町柳
6680245,兵庫県,豊岡市,出石町百合
6680232,兵庫県,豊岡市,出石町宵田
6680253,兵庫県,豊岡市,出石町榎見
6680252,兵庫県,豊岡市,出石町和屋
668002
|
e1f8424e0d08b91a96e8e8fa4e7271d7a13cf0a3
|
Python
|
import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
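# Minimal consumption sketch (editorial comment; the process name is
# illustrative, not taken from this fragment):
#
#   process = cms.Process("ANA")
#   process.source = source
#   process.maxEvents = maxEvents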
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/002F2CE1-38BB-E611-AF9F-0242AC130005.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/009CE684-45BB-E611-A261-001E67E6F8FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/044FF9CC-42BB-E611-ACB0-0CC47AD98BC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/06103109-48BB-E611-86BE-001E673968A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0843C79F-FCBD-E611-B38C-001E67A3F8A8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0881BCD8-8FBE-E611-8796-002590FD5A72.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08E524F3-0ABC-E611-984F-141877639F59.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08F5FD50-23BC-E611-A4C2-00259073E3DA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0A85AA82-45BB-E611-8ACD-001E674FB063.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CA050B2-57BB-E611-8A7A-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CC4E5F0-8EBE-E611-81A0-FA163E0546A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10104CB2-51BB-E611-BCDC-FA163E2D421C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10B1C835-51BB-E611-962E-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10F3C0E6-BDBD-E611-B15C-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/144E9DCA-3ABD-E611-B140-0025905B85EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/147E1208-0EBC-E611-8AB4-20CF307C9897.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16409928-3FBB-E611-B72C-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16602767-48BB-E611-B7A6-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16BBACA8-FBBD-E611-BEC0-FA163E72410F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16D0A23B-0EBD-E611-A7D4-00266CFF090C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/184B6801-D9BC-E611-8E6A-00259073E52C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/1C48C9F3-58BB-E611-95E5-FA163E897AAE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/203922CF-19BD-E611-A4CB-002590D0AF54.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/245AB734-3DBB-E611-A2BE-0090FAA575B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/260BAF20-F9BD-E611-AB0D-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/269219CA-42BB-E611-9B4D-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2A5DBC04-3CBB-E611-9C43-0CC47AA99436.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2C22DEF7-8EBE-E611-9D17-0025905A497A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3048BFB6-48BD-E611-B2AE-FA163E7B239E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3232416A-4CBB-E611-9301-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3665009B-FFBD-E611-9358-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/38F82D20-42BB-E611-AA3B-002590747E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3C994553-4DBB-E611-829E-00259048BF92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3CB1F166-3EBB-E611-BBAC-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3E96493A-F2BD-E611-B4D4-24BE05C6E561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44E3620A-31BB-E611-B8EE-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44F1F2FC-3BBB-E611-9596-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/48723720-30BC-E611-906D-0025905B855C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/4C6573B5-F8BC-E611-8B6D-0CC47A7C340E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/5A85E548-3FBB-E611-9AF8-001E674FCAE9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6064DD7D-27BC-E611-9269-FA163E3A554D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6079A2C9-5CBB-E611-9D23-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/64BFBA66-2ABC-E611-9884-02163E013C92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/68FD6C45-4EBB-E611-8CE3-0CC47A7452D8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6AAB2667-DFBC-E611-BCE9-44A842CFCA0D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6C7AF5E2-51BB-E611-944C-0025905A60B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6CB42E13-4ABB-E611-B37A-B083FECFF6AB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E2754F6-49BB-E611-A8B6-00259074AE8A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E4F2DD7-3FBB-E611-A5F6-0CC47A13CD44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E7B7470-FEBD-E611-9FD6-0CC47A78A446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E8A3BE5-4CBB-E611-A86D-00259073E4E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/70F436E2-3DBB-E611-92D6-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7240B6CA-D6BC-E611-8854-B083FED04276.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74368CB5-42BB-E611-B3D9-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74390FDB-35BD-E611-932E-02163E013EF0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/76C36A06-48BB-E611-984D-0090FAA58204.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/78EB0B24-02BE-E611-B6ED-FA163E275D07.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7A70E7A0-52BB-E611-A35E-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AA6AA1E-30BC-E611-8E7E-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AF0E4F9-4EBB-E611-9B9B-0CC47A13D284.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CE9BE91-0EBC-E611-A5DA-180373FF8446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CF35EDF-E8BC-E611-A47E-24BE05C488E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7EDEC297-2EBE-E611-857F-0242AC130003.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/8044F84B-44BB-E611-8915-001E674440E2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/864E3740-E6BC-E611-AD01-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88A401C7-48BB-E611-A057-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88FE7E84-17BC-E611-B83A-001EC94BF93F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/90410DC3-0EBC-E611-AAC2-001E675A6AA9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9488E5A5-4ABB-E611-8F1A-0025905A60AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/94F362D7-5DBB-E611-AB61-FA163E508270.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/98E58784-40BB-E611-ACF5-0CC47AD98D0C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9A25B43C-3DBB-E611-917E-001E674FB149.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9E2603C9-4DBB-E611-A64D-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A09103A1-3ABB-E611-9459-001E6745764D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A29BDC40-47BB-E611-93D2-B083FED42A1A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A416944E-41BB-E611-9753-0CC47AD99144.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A86280DE-51BB-E611-B051-0025905A6118.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A8BEF298-12BD-E611-90EE-E41D2D08DE30.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC1C716C-51BB-E611-BA14-0025907D1D6C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC289CA3-4CBB-E611-83E8-001EC94BF6CA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC9BCBDD-19BC-E611-9B23-002590791DA2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B0050E8E-32BB-E611-B390-0025900E3508.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B08DA7E7-43BB-E611-993E-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B64CF6DF-3CBB-E611-BB5A-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B6B969CA-4EBB-E611-AF09-FA163EC9E920.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B82DE45D-4DBB-E611-88CE-0025905B85FE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BC7EA562-4BBB-E611-BE25-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BEDB4181-97BE-E611-A59F-001E675A67BB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0306F0A-8FBE-E611-9828-0025905A6132.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0A1AF5B-44BB-E611-BD4B-0CC47AA992B4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C8F2E99E-3BBB-E611-9324-0090FAA59864.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CA4A3834-2BBB-E611-834E-90B11C2CA3F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CC245041-E2BC-E611-8171-00266CFCCB44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CEDA35B7-15BC-E611-B1AD-001E67E6F819.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D21EA92D-07BC-E611-BD74-70106F4A93E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D453B2C6-8FBE-E611-9644-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D4E99AD0-46BB-E611-BB98-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D62C98A8-08BC-E611-A9AC-B8CA3A70A410.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/DA92E40D-48BB-E611-B782-0025905A612E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIA