'''
@author: Ken Venner
@contact: [email protected]
@version: 1.13
Read in a file of wine names and create consistent wine descriptions
from these names.
'''
import kvutil
import kvcsv
import re
import sys
import shutil
# may comment out in the future
import pprint
pp = pprint.PrettyPrinter(indent=4)
ppFlag = False
# application variables
optiondictconfig = {
'AppVersion' : {
'value' : '1.13',
'description' : 'defines the version number for the app',
},
'debug' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we are running in debug mode',
},
'verbose' : {
'value' : 1,
'type' : 'int',
'description' : 'defines the display level for print messages',
},
'setup_check' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we are checking our setup',
},
'pprint' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we output with pretty print when debugging',
},
'csvfile_master_in' : {
'value' : 'wine_xref.csv',
'description' : 'defines the name of the master data input file',
},
'csvfile_update_in' : {
'value' : 'wineref.csv',
'description' : 'defines the name of the input file to be updated',
},
'csvfile_update_out' : {
'value' : 'wineref2.csv',
'description' : 'defines the name of the updated output file',
},
'fldWine' : {
'value' : 'wine',
'description' : 'defines the name of the field that holds the Wine ',
},
'fldWineDescr' : {
'value' : 'winedescr',
'description' : 'defines the name of the field holding the wine description',
},
'fldWineDescrNew' : {
'value' : 'winedescrnew',
'description' : 'defines the name of the NEW field holding the new description ',
},
'fldWineDescrMatch' : {
'value' : None,
'description' : 'defines the name of the NEW field holding the results of comparing the existing description to the new description',
},
'fldWineMaster' : {
'value' : None,
'description' : 'defines the name of the field that holds the Wine when reading the master file ',
},
'fldWineDescrMaster' : {
'value' : None,
'description' : 'defines the name of the field holding the wine description when reading the master file',
},
'backupfile_ext' : {
'value' : '.bak',
'description' : 'defines the extension to use to copy the update input file to if we are replacing it with output',
},
'defaultnew' : {
'value' : None,
'description' : 'defines the default value to assign to field fldWineDescrNew when it is not otherwise set',
},
}
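# illustrative usage sketch (the exact parser API lives in the project's
# kvutil module - kv_parse_command_line is an assumed name here):
#   optiondict = kvutil.kv_parse_command_line(optiondictconfig, debug=False)
#   ppFlag = optiondict['pprint']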
### GLOBAL VARIABLES / LOOKUPS ########################################
# regex search for vintage in wine name
vintageLookup = (
re.compile(r'\d\d\d\d\s+\d\d(\d\d)'), # two years together - check this pattern first so the second year wins
re.compile(r'^\d\d(\d\d)'), # four position start of line
re.compile(r'\s\d\d(\d\d)$'), # four position end of line
re.compile(r'\s\d\d(\d\d)\s'), # four position middle of line
re.compile(r'XX\d\d(\d\d)\s'), # four position preceded by literal XX
re.compile(r'\s\d\d(\d\d)\/'), # four position followed by a slash
re.compile(r'\s\'?(\d\d)\'?$|\s\'?(\d\d)\'?\s'), # two position date with optional apostrophe front or back
)
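# illustrative matches (comment-only sketch of how findVintage applies these):
#   'Caymus 2014 Cab'    -> middle-of-line pattern captures '14'
#   "Silver Oak '12 Cab" -> two-position pattern captures '12'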
# regex search for case in wine name
reCase = re.compile(r'12\s*X\s*750\s*ML|\bcase\b|12\/750\s*ML',re.IGNORECASE)
# regex to pick up qualifiers from the wine
reQualLookup = (
(None, re.compile(r'\bWithout\s+Gift\b|\bNo\s+Gift', re.IGNORECASE)), # the no-gift patterns - check these first
('Gift', re.compile(r'\bGift\b', re.IGNORECASE)),
('VAP', re.compile(r'\bVAP\b', re.IGNORECASE)),
('VAP', re.compile(r'\bGlassVAP\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlass\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlasses\b', re.IGNORECASE)),
('Etch', re.compile(r'\bEtch\b', re.IGNORECASE)),
('Basket', re.compile(r'\bBasket\b', re.IGNORECASE)),
)
# regex search to define the size of the wine bottle
sizeLookup = (
('1.75L', re.compile(r'\b1\.75\s*Li?|\b1\.75$', re.IGNORECASE)),
('1.5L', re.compile(r'\b1\.5\s*L?\b|\bMagnum\b', re.IGNORECASE)),
('375mL', re.compile(r'Half\s+Bottle|375ml', re.IGNORECASE)),
('200mL', re.compile(r'\b200\s*ML|\(200\s*ML', re.IGNORECASE)),
('50mL', re.compile(r'\b50\s*ML|\(50\s*ML', re.IGNORECASE)),
('500mL', re.compile(r'\b500\s*ML|\(500\s*ML', re.IGNORECASE)),
('3L', re.compile(r'\b3\s*Li?', re.IGNORECASE)),
('6L', re.compile(r'\b6\s*Li?', re.IGNORECASE)),
('9L', re.compile(r'\b9\s*Li?', re.IGNORECASE)),
('1L', re.compile(r'\b1L\b|\b1\s+L$|\b1\.0\s*L\b|\b1\s+Liter\b|\bOne\s+Liter\b|\bLITER\b|\b1\s*LTR', re.IGNORECASE)),
)
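# illustrative matches (comment-only sketch; the first matching entry wins):
#   'Opus One 1.5 L'     -> '1.5L'
#   'Caymus Half Bottle' -> '375mL'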
# regex extract winery names from the wine field
wineryLookup = (
('Alban', re.compile(r'\bAlban\b', re.IGNORECASE)),
('Arrowood', re.compile(r'\bArrowood\b', re.IGNORECASE)),
('Atalon', re.compile(r'\bAtalon\b', re.IGNORECASE)),
('Attune', re.compile(r'\bAttune\b', re.IGNORECASE)),
('Auteur', re.compile(r'\bAuteur\b', re.IGNORECASE)),
('Austin Hope', re.compile(r'\bAustin\s+Hope\b', re.IGNORECASE)),
('Badge', re.compile(r'\bBadge\b', re.IGNORECASE)),
('Balletto', re.compile(r'\bBalletto\b', re.IGNORECASE)),
('Bell', re.compile(r'\bBell\s+Cellar', re.IGNORECASE)),
('BR Cohn', re.compile(r'\bB\.?\s?R\.?\s+Cohn\b', re.IGNORECASE)),
('Bremer', re.compile(r'\bBremer\b', re.IGNORECASE)),
('Brewer-Clifton', re.compile(r'\bBrewer[\s\-]Clifton\b', re.IGNORECASE)),
('BV', re.compile(r'\bBeaulieu\s+V|\bBV\b', re.IGNORECASE)),
('Belle Glos', re.compile(r'\bBelle\s+Glos\b', re.IGNORECASE)),
('Bennett Ln', re.compile(r'\bBennet+\sLane\b', re.IGNORECASE)),
('Benovia', re.compile(r'\bBenovia\b', re.IGNORECASE)),
('Beringer', re.compile(r'\bBeringer\b', re.IGNORECASE)),
('Blackstone', re.compile(r'\bBlackstone\b', re.IGNORECASE)),
('Brancott', re.compile(r'\bBrancott\b', re.IGNORECASE)),
('Cade', re.compile(r'\bCade\b', re.IGNORECASE)),
('Cain Five', re.compile(r'\bCain\s+Five\b|\bCain\s-\sFive\b|\bCain\s5\b|\bCainFive\b', re.IGNORECASE)),
('Cakebread', re.compile(r'\bCakebread\b', re.IGNORECASE)),
('Cardinale', re.compile(r'\bCardinale\b', re.IGNORECASE)),
('Caymus', re.compile(r'\bCaymus\b', re.IGNORECASE)),
('Chappellet', re.compile(r'\bChappellet\b', re.IGNORECASE)),
('Chalk Hill', re.compile(r'\bChalk\s+Hill\b', re.IGNORECASE)),
('Clos Du Bois', re.compile(r'\bClos\s+Du\s+Bois\b', re.IGNORECASE)),
('ClosDuVal', re.compile(r'\bClos\s+du\s+Val\b', re.IGNORECASE)),
('Colgin', re.compile(r'\bColgin\b', re.IGNORECASE)),
('Concha Don Melchor', re.compile(r'\bConcha\s.*Don\s+Melchor\b|Don\s+Melchor\b', re.IGNORECASE)),
('Continuum', re.compile(r'\bContinuum\b', re.IGNORECASE)),
('Corison', re.compile(r'\bCorison\b', re.IGNORECASE)),
('Cristal', re.compile(r'Roederer\s?.*Cristal\b|\bCristal\b.+Brut', re.IGNORECASE)),
('Curran', re.compile(r'\bCurran\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bDarioush\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bCaravan\b', re.IGNORECASE)),
('David Arthur', re.compile(r'\bDavid\s+Arthur\b', re.IGNORECASE)),
('David Bruce', re.compile(r'\bDavid\s+Bruce\b', re.IGNORECASE)),
('Davis Family', re.compile(r'\bDavis\s+Family\b', re.IGNORECASE)),
('Del Dotto', re.compile(r'\bDel\s+Dotto\b', re.IGNORECASE)),
('Dominus', re.compile(r'\bDominus\b', re.IGNORECASE)),
('Goldeneye', re.compile(r'\bGoldeneye\b', re.IGNORECASE)), # before duckhorn
('Paraduxx', re.compile(r'\bParaduxx\b', re.IGNORECASE)), # before duckhorn
('Domaine Carneros', re.compile(r'\bDomaine\s+Carneros\b', re.IGNORECASE)),
('Dominus', re.compile(r'\bDominus\b', re.IGNORECASE)),
('Drappier', re.compile(r'\bDrappier\b', re.IGNORECASE)),
('Duckhorn', re.compile(r'\bDuckhorn\b', re.IGNORECASE)),
('Dumol', re.compile(r'\bDumol\b', re.IGNORECASE)),
('Dunn', re.compile(r'\bDunn\b', re.IGNORECASE)),
('Ehlers', re.compile(r'\bEhlers\b', re.IGNORECASE)),
('Etude', re.compile(r'\bEtude\b', re.IGNORECASE)),
('Far Niente', re.compile(r'\bFar Niente\b', re.IGNORECASE)),
('Flora', re.compile(r'\bFlora\s+Springs\b', re.IGNORECASE)),
('Flowers', re.compile(r'\bFlowers\b', re.IGNORECASE)),
('Robert Foley', re.compile(r'\bRobert\s+Foley\b', re.IGNORECASE)), # before Foley
('Foley', re.compile(r'\bFoley\b', re.IGNORECASE)),
('Foxen', re.compile(r'\bFoxen\b', re.IGNORECASE)),
('Franciscan', re.compile(r'\bFranciscan\b', re.IGNORECASE)),
('Frank Family', re.compile(r'\bFrank Family\b', re.IGNORECASE)),
('Gary Farrell', re.compile(r'\bGary\s+Farrel+\b', re.IGNORECASE)),
('Ghost Block', re.compile(r'\bGhost\s+Block\b', re.IGNORECASE)),
('Grgich', re.compile(r'\bGrgich\b', re.IGNORECASE)),
('Groth', re.compile(r'\bGroth\b', re.IGNORECASE)),
('Gundlach', re.compile(r'\bGundlach\b', re.IGNORECASE)),
('Hansel', re.compile(r'\bHansel\b', re.IGNORECASE)),
('Hanzell', re.compile(r'\bHanzell\b', re.IGNORECASE)),
('Hess', re.compile(r'\bHess\b', re.IGNORECASE)),
('Hewitt', re.compile(r'\bHewitt\b', re.IGNORECASE)),
('Hobbs', re.compile(r'\bHobbs\b|\bcrossbarn\b', re.IGNORECASE)),
('Hundred Acre', re.compile(r'\bHundred\s+Acre\b', re.IGNORECASE)),
('Jordan', re.compile(r'\bJordan\b', re.IGNORECASE)),
('Justin', re.compile(r'\bJustin\b', re.IGNORECASE)),
('Kim Crawford', re.compile(r'\bKim\s+Crawford\b', re.IGNORECASE)),
('Kistler', re.compile(r'\bKistler\b', re.IGNORECASE)),
('Kosta', re.compile(r'\bKosta\s+Browne?\b', re.IGNORECASE)),
('Krug', re.compile(r'\bKrug\b', re.IGNORECASE)),
('Kunde', re.compile(r'\bKunde\b', re.IGNORECASE)),
('LaCrema', re.compile(r'\bLa\s?Crema\b', re.IGNORECASE)),
('Lewis', re.compile(r'\bLewis\b', re.IGNORECASE)),
('Lokoya', re.compile(r'\bLokoya\b', re.IGNORECASE)),
('Meiomi', re.compile(r'\bMeiomi\b', re.IGNORECASE)),
('Melville', re.compile(r'\bMelville\b', re.IGNORECASE)),
('Momento Mori', re.compile(r'\bMomento\s+Mori\b', re.IGNORECASE)),
('Mondavi', re.compile(r'\bMondavi\b', re.IGNORECASE)),
('Montelena', re.compile(r'\bMontelena\b', re.IGNORECASE)),
('Mt Veeder', re.compile(r'^Mount\s+Veeder\b|^Mt\.? Veeder\b|\d+\s+M[^t]*t\s+Veeder\b', re.IGNORECASE)),
('Newton', re.compile(r'\bNewton\b', re.IGNORECASE)),
('Nickel', re.compile(r'\bNickel\b', re.IGNORECASE)),
('Opus One', re.compile(r'\bOpus\s+One\b', re.IGNORECASE)),
('P Togni', re.compile(r'\bTogni\b', re.IGNORECASE)),
('Pahlmeyer Jayson', re.compile(r'\bJayson\b', re.IGNORECASE)), # this before pahlmeyer
('Pahlmeyer', re.compile(r'\bPahlmeyer\b(?!\s*Jay)', re.IGNORECASE)),
('Papillon', re.compile(r'\bPapillon\b', re.IGNORECASE)),
('Patz', re.compile(r'\bPatz\b', re.IGNORECASE)),
('Phelps', re.compile(r'\bPhelps\b', re.IGNORECASE)),
('Plumpjack', re.compile(r'\bPlumpjack\b', re.IGNORECASE)),
('Pride', re.compile(r'\bPride\b', re.IGNORECASE)),
('Prisoner', re.compile(r'\bPrisoner\b', re.IGNORECASE)),
('Provenance', re.compile(r'\bProvenance\b', re.IGNORECASE)),
('R Sinskey', re.compile(r'\bSinskey\b', re.IGNORECASE)),
('Ramey', re.compile(r'\bRamey\b', re.IGNORECASE)),
('Revana', re.compile(r'\bRevana\b', re.IGNORECASE)),
('Raptor', re.compile(r'\bRaptor\s+Ridge\b', re.IGNORECASE)),
('Ridge', re.compile(r'\bRidge\b', re.IGNORECASE)),
('Robert Foley', re.compile(r'\bRobert\s+Foley\b', re.IGNORECASE)),
('Rombauer', re.compile(r'\bRombauer\b', re.IGNORECASE)),
('Rudd', re.compile(r'\bRudd\b', re.IGNORECASE)),
('Scarecrow', re.compile(r'\bScarecrow\b', re.IGNORECASE)),
('Sea Smoke', re.compile(r'\bSea\s+Smoke\b', re.IGNORECASE)),
('Seghesio', re.compile(r'\bSeghesio\b', re.IGNORECASE)),
('Shafer', re.compile(r'\bShafer\b', re.IGNORECASE)),
('Sherwin', re.compile(r'\bSherwin\b', re.IGNORECASE)),
('Silver Oak', re.compile(r'\bSilver\s+Oak\b', re.IGNORECASE)),
('Silverado', re.compile(r'\bSilverado\b', re.IGNORECASE)),
('Simi', re.compile(r'\bSimi\b', re.IGNORECASE)),
('Sonoma Cutrer', re.compile(r'\bCutrer\b', re.IGNORECASE)),
('Spottswoode', re.compile(r'\bSpottswoode\b', re.IGNORECASE)),
('Stag Leap', re.compile(r'\bStag.*\sLeap\b', re.IGNORECASE)),
('Sullivan', re.compile(r'\bSullivan\b', re.IGNORECASE)),
('Summerland', re.compile(r'\bSummerland\b', re.IGNORECASE)),
('Summers', re.compile(r'\bSummers\b', re.IGNORECASE)),
('Tantara', re.compile(r'\bTantara\b', re.IGNORECASE)),
('Turnbull', re.compile(r'\bTurnbull\b', re.IGNORECASE)),
('Veuve', re.compile(r'\bVeuve\b', re.IGNORECASE)),
('Viader', re.compile(r'\bViader\b', re.IGNORECASE)),
('Waterstone', re.compile(r'\bWaterstone\b', re.IGNORECASE)),
('Whitehall', re.compile(r'\bWhitehall\b', re.IGNORECASE)),
('Wm Selyem', re.compile(r'\bWilliams\s*\-?Selyem\b', re.IGNORECASE)),
('ZD', re.compile(r'\bZD\b', re.IGNORECASE)),
('Zaca', re.compile(r'\bZaca\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Reserve\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Rsv\b', re.IGNORECASE)),
('zCognac Courvoisier', re.compile(r'\bCourvoisier\b', re.IGNORECASE)),
('zCognac Hennessy', re.compile(r'\bHennesse?y\b', re.IGNORECASE)),
('zCognac Remy', re.compile(r'\bRemy\s+Martin\b|\bRemy\s+Louis', re.IGNORECASE)),
('zCointreau', re.compile(r'\bCointreau\b', re.IGNORECASE)),
('zGin Hendrick', re.compile(r'\bHendrick', re.IGNORECASE)),
('zGin Tanqueray', re.compile(r'\bTanqueray\b', re.IGNORECASE)),
('zRum Mt Gay', re.compile(r'\bMount\s+Gay\b|\bMt\s+Gay', re.IGNORECASE)),
('zRum Ron Zacapa', re.compile(r'\bRon\s+Zacapa\b', re.IGNORECASE)),
('zRye Hayden', re.compile(r'\bBasil\s+Hayden\b', re.IGNORECASE)),
('zSambuca', re.compile(r'\bSambuca\b', re.IGNORECASE)),
('zScotch Glenmorangie', re.compile(r'\bGlenmorangie\b', re.IGNORECASE)),
('zScotch Hibiki Harmony', re.compile(r'\bHibiki\s.*Harmony\b', re.IGNORECASE)),
('zScotch Hibiki', re.compile(r'\bHibiki\b(?!\s*Har)', re.IGNORECASE)),
('zScotch Macallan', re.compile(r'\bMacallan\b', re.IGNORECASE)),
('zTeq Campo Azul', re.compile(r'\bCampo\s+Azul\b', re.IGNORECASE)),
('zTeq Casamigos', re.compile(r'\bCasamigos\b', re.IGNORECASE)),
('zTeq Casino Azul', re.compile(r'\bCasino\s+Azul\b', re.IGNORECASE)),
('zTeq Clase Azul', re.compile(r'\bClase\s+Azul\b', re.IGNORECASE)),
('zTeq Cuervo', re.compile(r'\bJose\s+Cuervo\b|^Cuervo\b', re.IGNORECASE)),
('zTeq Don Julio', re.compile(r'\bDon\s+Julio\b', re.IGNORECASE)),
('zTeq Dos Artes', re.compile(r'\bDos\s+Artes\b', re.IGNORECASE)),
('zTeq Gran Cava', re.compile(r'\bGran\s+Cava\b', re.IGNORECASE)),
('zTeq Herradura', re.compile(r'\bHerradura\b', re.IGNORECASE)),
('zTeq Loma Azul', re.compile(r'\bLoma\s+Azul\b', re.IGNORECASE)),
('zTeq Padre Azul', re.compile(r'\bPadre\s+Azul\b', re.IGNORECASE)),
('zTeq Partida', re.compile(r'\bPartida\b', re.IGNORECASE)),
('zTeq Patron', re.compile(r'\bPatron\b', re.IGNORECASE)),
('zTripleSec Gr Marnier', re.compile(r'\bGrand\s+Marnier\b', re.IGNORECASE)),
('zTripleSec Dekuyper', re.compile(r'\bDekuyper\b', re.IGNORECASE)),
('zTripleSec Hiram', re.compile(r'\bHiram\b', re.IGNORECASE)),
('zVodka Absolut', re.compile(r'\bAbsolut\b', re.IGNORECASE)),
('zVodka Skyy', re.compile(r'\bSkyy\b', re.IGNORECASE)),
('zVodka Tito', re.compile(r'\bTito', re.IGNORECASE)),
('zWhiskey Balvenie', re.compile(r'\bBalvenie\b', re.IGNORECASE)),
('zWhiskey J Walker', re.compile(r'\bJohn+ie\s+Walker\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
)
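# note: order matters - findWinery returns the first matching entry, which is
# why Goldeneye and Paraduxx are listed ahead of Duckhorn above.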
# regex extract the grape from the wine fld
grapeLookup = (
('Cab Franc', re.compile(r'\bCabernet\s+Franc|\bCab\s+Franc', re.IGNORECASE)), # before cab
('Cab', re.compile(r'\bCabernet\b|\sCS\s|\sCS$|\bCab\b', re.IGNORECASE)),
('Claret', re.compile(r'\bClaret\b', re.IGNORECASE)),
('Rose Pinot', re.compile(r'\bRose\b.*\bPinot\b|\bPinot\b.*\bRose\b', re.IGNORECASE)),
('Pinot', re.compile(r'\bPinot\b|\bPN\b|\bP\s+Noir\b', re.IGNORECASE)),
('Merlot', re.compile(r'\bMerlot\b|\bME\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\s+Blanc\b|\bSB\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\/Fume\s+Blanc\b', re.IGNORECASE)),
('Meritage', re.compile(r'\bMeritage\b', re.IGNORECASE)),
('Fume', re.compile(r'\bFume\b|\bFumé', re.IGNORECASE)),
('Champagne', re.compile(r'\bChampagne\b', re.IGNORECASE)),
('Chard', re.compile(r'\bChar+d|\bCH\b', re.IGNORECASE)),
('Shiraz', re.compile(r'\bShiraz\b', re.IGNORECASE)),
('Syrah', re.compile(r'\bSyrah\b|\bSY\b',re.IGNORECASE)),
('Zin', re.compile(r'\bZinfandel\b|\bZIN\b|\bZN\b', re.IGNORECASE)),
('Rose', re.compile(r'\bRose\b|\bRosé', re.IGNORECASE)),
('Sangiovese', re.compile(r'\bSangiovese\b', re.IGNORECASE)),
# ('Brandy', re.compile(r'\bBrandy\b', re.IGNORECASE)),
('Gewurzt', re.compile(r'\bGew.rztraminer\b|\bGewürzt', re.IGNORECASE)),
('Malbec', re.compile(r'\bMalbec\b', re.IGNORECASE)),
('Viognier', re.compile(r'\bViognier\b', re.IGNORECASE)),
('Roussanne', re.compile(r'\bRoussanne\b', re.IGNORECASE)),
('Charbono', re.compile(r'\bCharbono\b', re.IGNORECASE)),
('PSirah', re.compile(r'\bPetite Sirah\b', re.IGNORECASE)),
('Cuvee', re.compile(r'\bCuvee\b', re.IGNORECASE)),
('Red', re.compile(r'\bRed\b|\bBordeaux\s+Blend\b', re.IGNORECASE)),
('Syrah-Cab', re.compile(r'\bSyrcab\b|\bsyrah[-\s\/]+cab', re.IGNORECASE)),
('Grenache', re.compile(r'\bGrenache\b', re.IGNORECASE)),
('Tempranillo', re.compile(r'\bTempranillo\b', re.IGNORECASE)),
)
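# illustrative match (comment-only sketch; Cab Franc precedes Cab so the more
# specific grape is found first):
#   'Kistler Chardonnay Sonoma' -> ('Chard', <compiled regex>)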
# wineries that we don't want to look up the grape on
ignoreGrapeLookup = {
'Cristal' : ['Rose', None],
'Domaine Carneros' : ['Brut', None],
'Dominus' : [None],
'Papillon' : None,
'Paraduxx' : None,
'Veuve' : None,
'zCointreau' : None,
'zGin Hendrick' : None,
'zGin Tanqueray' : ['Ten', None],
'zTripleSec Gr Marnier' : ['1880', '100th', 'Cent', 'Quin', None],
'zTripleSec Dekuyper' : None,
'zTripleSec Hiram' : None,
'zVodka Skyy' : ['Citrus', None],
'zVodka Tito' : None,
# 'Prisoner' : ['Cuttings', 'Red', 'Derange', 'Saldo', 'Blindfold', None],
}
# winery to wine lookup when no grape is found in the wine name
#
# extract the wine name for a winery - when a record does not have a grape lookup
# the name looked up and found will be the name used
noGrapeLookup = {
'Ehlers' : ['120-80'], # matches an abbreviation - and matches fldWineDescr
'Alban' : ['Pandora'],
'BV' : ['Tapestry', 'Latour'],
'Bennett Ln' : ['Maximus'],
'Bremer' : ['Austintatious'],
'Cain Five' : None,
'Colgin' : ['Cariad', 'IX'],
'Concha Don Melchor' : None,
'Continuum' : None,
'Darioush' : ['Duel', 'Darius'],
'Duckhorn' : ['Discussion'],
'Far Niente' : ['Dolce'],
'Flora' : ['Trilogy'],
'Franciscan' : ['Magnificat'],
'Grgich' : ['Violetta'],
'Gundlach' : ['Vintage Reserve'],
'Justin' : ['Isosceles'],
'Krug' : ['Generations'],
'Mondavi' : ['Maestro'],
'Newton' : ['Puzzle'],
'Opus One' : None,
'Phelps' : ['Insignia'],
'Prisoner' : ['Cuttings', 'Derange', 'Saldo', 'Blindfold'],
'Ridge' : ['Monte Bello'],
'Robert Foley' : ['Griffin'],
'Sullivan' : ['Coeur de Vigne'],
'Zaca' : ['ZThree', 'ZCuvee'],
'zCognac Courvoisier' : ['Napolean', 'VS', 'VSOP', 'XO'],
'zCognac Hennessy' : ['Paradis', 'Richard', 'VS', 'VSOP', 'XO', 'Master'],
'zCognac Remy' : ['1738', 'Louis XIII', 'VSOP', 'XO', 'VS'],
'zRum Ron Zacapa' : ['23', 'Negra', 'XO'],
'zRye Hayden' : ['Dark', 'Caribbean'],
'zScotch Hibiki Harmony' : None,
# 'zScotch Hibiki' : ['Toki', '12', '17', '21', '30'],
'zTeq Campo Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casamigos' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casino Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Silver'],
'zTeq Clase Azul' : ['Ultra', 'Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Mezcal', 'Plata', 'Platino'],
'zTeq Dos Artes' : ['Extra Anejo'],
'zTeq Gran Cava' : ['Extra Anejo'],
'zTeq Loma Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
# 'zTeq Padre Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Partida' : ['Blanco', 'Elegante'],
'zVodka Absolut' : ['Citron', 'Mandarin', 'Mandrin', 'Mango', 'Ruby', 'Vanilia', 'Raspberri', 'Grapevine', None],
'zWhiskey J Walker' : ['Double Black', 'Black', 'Blue', 'Gold', 'Green', 'Platinum', 'Red','Swing', 'White', '18', '21'],
}
# regex to use to determine if this is a liquor not a wine
#
# winery -> [ liquor, regex ]
# if there is no grape, and no noGrapeLookup found, but the winery has a liquorLookup
# use the list of lookups to find the additional information to add to the winery
#
liquorLookup = {
'zRum Mt Gay' : [
('1703 Mst', re.compile(r'\b1703\b', re.IGNORECASE)),
('BB', re.compile(r'\bBlack Barrel\b', re.IGNORECASE)),
('Eclipse Silver', re.compile(r'\bEclipse\s+Silver\b', re.IGNORECASE)),
('Eclipse', re.compile(r'\bEclipse\b', re.IGNORECASE)),
('Old Peat', re.compile(r'\bOld Peat', re.IGNORECASE)),
('Old Pot', re.compile(r'\bPot\s+Still\b', re.IGNORECASE)),
('Old', re.compile(r'\bOld\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('XO Peat', re.compile(r'\bXO\b', re.IGNORECASE)),
],
'zScotch Glenmorangie' : [
('10', re.compile(r'\b10(YR)?\b', re.IGNORECASE)),
('14 Port', re.compile(r'14.+\bQuinta\b|14.+\bPort\b|\bQuinta\b.+14|\bPort\b.+14', re.IGNORECASE)),
('12 Bacalta', re.compile(r'\bBacalta\b', re.IGNORECASE)),
('12 Burgundy', re.compile(r'\bBurgundy\b', re.IGNORECASE)),
('12 Nectar', re.compile(r'\bNectar\b', re.IGNORECASE)),
('12 Port', re.compile(r'\bQuinta\b|\bPort\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\bLa\s?Santa\b|\bSherry\b', re.IGNORECASE)),
('12 Signet', re.compile(r'\bSignet\b', re.IGNORECASE)),
('15 Cadboll', re.compile(r'\bCadboll', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b|\b18YEAR\b', re.IGNORECASE)),
('25 Astar', re.compile(r'\bAstar\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b', re.IGNORECASE)),
('Companta', re.compile(r'\bCompanta\b', re.IGNORECASE)),
('Finealta', re.compile(r'\bFinealta\b', re.IGNORECASE)),
('Milsean', re.compile(r'\bMilsean\b', re.IGNORECASE)),
('Sonnalta', re.compile(r'\bSonnalta\b', re.IGNORECASE)),
],
'zScotch Macallan' : [
('10 Fine', re.compile(r'\bFine.*\b10\b|\b10.*Fine')),
('10', re.compile(r'\b10\b')),
('12 Double Gold', re.compile(r'\bDbl\b.*Gold|\bDouble\b.*Gold', re.IGNORECASE)),
('12 Double', re.compile(r'\bDouble\s.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12\s.*Double\b', re.IGNORECASE)),
('12 Double', re.compile(r'\bDbl\b|\bDouble\b', re.IGNORECASE)),
('12 Edition 1', re.compile(r'\bEdition\s.*1\b', re.IGNORECASE)),
('12 Edition 2', re.compile(r'\bEdition\s.*2\b', re.IGNORECASE)),
('12 Edition 3', re.compile(r'\bEdition\s.*3\b', re.IGNORECASE)),
('12 Edition 4', re.compile(r'\bEdition\s.*4\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\b12\s.*Sherry\b|\bSherry\b\s.*\b12', re.IGNORECASE)),
('12 Triple', re.compile(r'\b12(YR)?\s.*Triple\b', re.IGNORECASE)),
('12 Triple', re.compile(r'\bTriple\s.*12\b', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('15 Triple', re.compile(r'\b15(YR)?\s.*Triple\b|Triple.+\b15(YR)?\b', re.IGNORECASE)),
('15 Fine', re.compile(r'\b15(YR)?\b.*\bFine\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('17 Sherry', re.compile(r'\b17(YR)?\s.*Sherry\b', re.IGNORECASE)),
('17 Fine', re.compile(r'\b17(YR)?\b.*\bFine\b', re.IGNORECASE)),
('17', re.compile(r'\b17(YR)?\b', re.IGNORECASE)),
('18 Sherry', re.compile(r'\b18(YR)?\s.*Sherry\b|Sherry\b.*18', re.IGNORECASE)),
('18 Triple', re.compile(r'\b18(YR)?\s.*Triple\b|Triple.+\b18(YR)?\b', re.IGNORECASE)),
('18 Fine', re.compile(r'\b18(YR)?\b.*\bFine\b', re.IGNORECASE)),
('18 Gran', re.compile(r'Gran\b.*\b18', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b', re.IGNORECASE)),
('21 Fine', re.compile(r'\b21.*Fine\b', re.IGNORECASE)),
('21', re.compile(r'\b21(YR)?\b', re.IGNORECASE)),
('25 Sherry', re.compile(r'\b25\s.*Sherry\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b')),
('30 Sherry', re.compile(r'\b30\s.*Sherry', re.IGNORECASE)),
('30 Triple', re.compile(r'\b30(YR)?\s.*Triple\b|Triple.+\b30(YR)?\b', re.IGNORECASE)),
('30 Fine', re.compile(r'\b30(YR)?\b.*\bFine\b|Fine.*30', re.IGNORECASE)),
('30', re.compile(r'\b30(YR)?\b')),
('Rare', re.compile(r'\bRare\b', re.IGNORECASE)),
],
'zTeq Cuervo' : [
('Especial Gold', re.compile(r'\bEspecial\b.*Gold\b|Gold.*Especial', re.IGNORECASE)),
('Especial Blue', re.compile(r'\bEspecial\b.*Blue\b', re.IGNORECASE)),
('Especial', re.compile(r'\bEspecial\b', re.IGNORECASE)),
('Familia Platino', re.compile(r'\bPlatino\b', re.IGNORECASE)),
('Familia Anejo', re.compile(r'\bFamilia\b|\bReserva\b', re.IGNORECASE)),
('Gold', re.compile(r'\bGold\b', re.IGNORECASE)),
('Reposado Lagavulin', re.compile(r'\bReposado.*Lagavulin', re.IGNORECASE)),
('Tradicional Anejo', re.compile(r'Tradicional.*Anejo|Anejo.*Tradicional', re.IGNORECASE)),
('Tradicional Reposado', re.compile(r'Tradicional.*Reposado|Reposado.*Tradicional', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTradicional\b', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTraditional\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Don Julio' : [
('1942', re.compile(r'\b1942\b', re.IGNORECASE)),
('Real', re.compile(r'\bReal\b', re.IGNORECASE)),
('Anejo Claro 70th', re.compile(r'\b70th\b', re.IGNORECASE)),
('Anejo Claro', re.compile(r'\bAnejo\b\s*Claro\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
('Reposado Lagavulin', re.compile(r'\bRepo.+Laga?vulin\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Double\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Dbl\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bDouble.+Reposado\b', re.IGNORECASE)),
('Reposado Private', re.compile(r'\bReposado.+Private\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Herradura' : [
('Ultra', re.compile(r'\bUltra\b', re.IGNORECASE)),
('Suprema', re.compile(r'\bSuprema\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
('Reposado Gold', re.compile(r'\bReposado\s+Gold\b|\bGold\s+Reposado\b', re.IGNORECASE)),
('Reposado Scotch', re.compile(r'\bReposado.+Scotch\b|\bScotch.+Reposado\b', re.IGNORECASE)),
('Reposado Port', re.compile(r'\bPort.+Reposado\b|\bReposado.+Port\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Patron' : [
('Gran Piedra', re.compile(r'\bPiedra\b', re.IGNORECASE)),
('DELETE Roca DELETE', re.compile(r'\bRoca\b', re.IGNORECASE)),
('Anejo Extra Lalique', re.compile(r'\bLalique\b', re.IGNORECASE)),
('Anejo Extra 7yr', re.compile(r'\b7YR\b|\b7 anos\b|\b7 year\b', re.IGNORECASE)),
('Anejo Extra 5yr', re.compile(r'\b5YR\b|\b5 anos\b|\b5 year\b', re.IGNORECASE)),
('Anejo Extra 10yr', re.compile(r'\b10\b.+\bExtra\b|\bExtra\b.+10', re.IGNORECASE)),
('Anejo Extra', re.compile(r'\bExtra\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bGran\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bBurdeos\b', re.IGNORECASE)),
('Gran Smoky', re.compile(r'\bGran\s+.*Smoky\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Gran Platinum', re.compile(r'\bPlatinum\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver LTD', re.compile(r'\bSilver.*Limited\b|\bLimited.*Silver\b', re.IGNORECASE)),
('Silver Estate', re.compile(r'\bEstate.*Silver\b|\bSilver.*Estate\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
],
'zTeq Padre Azul' : [
('Blanco', re.compile(r'\bsilver\b', re.IGNORECASE)),
],
'zWhiskey Balvenie' : [
('12 Double', re.compile(r'\bDouble.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12(YR)?\s.*Double', re.IGNORECASE)),
('12 First', re.compile(r'\b12(YR)?\s.*First', re.IGNORECASE)),
('12 USA', re.compile(r'\b12.*American|American.*12', re.IGNORECASE)),
('12 Toast', re.compile(r'\b12(YR)?\s.*Toast', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*Carib', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*CB\s+Cask', re.IGNORECASE)),
('14 Carib', re.compile(r'\bCarr?ib', re.IGNORECASE)),
('14 Peat', re.compile(r'\b14(YR)?\s.*Peat', re.IGNORECASE)),
('15 Sherry', re.compile(r'\b15(YR)?\s.*Sherry\b', re.IGNORECASE)),
('15 Sherry', re.compile(r'\bSherry\s+.*15(YR)?\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('16 Triple', re.compile(r'\b16(YR)?\s.*Triple\b', re.IGNORECASE)),
('17 Sherry Double', re.compile(r'\b17(YR)?\s.*Sherry\s+Doub', re.IGNORECASE)),
('17 Sherry', re.compile(r'\b17(YR)?\s.*Sherry', re.IGNORECASE)),
('17 Double', re.compile(r'\b17(YR)?\s.*Double', re.IGNORECASE)),
('17 Double', re.compile(r'\bDouble.*17(YR)?\b', re.IGNORECASE)),
# 17 Double Sherry
# 17 Islay
# 17 New Oak
('17 Peat', re.compile(r'\b17(YR)?\s.*Peat', re.IGNORECASE)),
('17 Peat', re.compile(r'\bPeat.*17(YR)?\b', re.IGNORECASE)),
('17', re.compile(r'\b17(YR)?\b', re.IGNORECASE)),
('21 Port', re.compile(r'\b21.*Port', re.IGNORECASE)),
('21 Port', re.compile(r'\bPort.*21\b', re.IGNORECASE)),
('21', re.compile(r'21', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b', re.IGNORECASE)),
('30', re.compile(r'\b30(YR)?\b', re.IGNORECASE)),
('40', re.compile(r'\b40(YR)?\b', re.IGNORECASE)),
],
'zBourbon Woodford Res' : [
('Dbl', re.compile(r'\bDouble\b', re.IGNORECASE)),
('Derby', re.compile(r'\bDerby\b', re.IGNORECASE)),
('Rye Choc', re.compile(r'\bChocolate.*Rye\b', re.IGNORECASE)),
('Rye', re.compile(r'\bRye\b', re.IGNORECASE)),
('Brandy', re.compile(r'\bBrandy\b', re.IGNORECASE)),
('Batch', re.compile(r'\bBatch\b', re.IGNORECASE)),
('Barrel', re.compile(r'\bBarrel\b', re.IGNORECASE)),
('Master', re.compile(r'\bMasters?\b', re.IGNORECASE)),
('Malt', re.compile(r'\bMalt\b', re.IGNORECASE)),
('Maple', re.compile(r'\bMaple\b', re.IGNORECASE)),
('Wheat', re.compile(r'\bWheat\b', re.IGNORECASE)),
('', re.compile(r'\bWoodford\b', re.IGNORECASE)),
],
'zSambuca' : [
('Romana Black', re.compile(r'\bRomana.*\bBlack\b|\bBlack\s+Romana\b', re.IGNORECASE)),
('Romana', re.compile(r'\bRomana\b', re.IGNORECASE)),
('Di Amore', re.compile(r'\bdi Amore\b', re.IGNORECASE)),
],
'zScotch Hibiki' : [
('12', re.compile(r'\b12\s*YE?A?R\b', re.IGNORECASE)),
('17 Limited', re.compile(r'\b17\s*YE?A?R\b.+Limited', re.IGNORECASE)),
('17', re.compile(r'\b17\s*YE?A?R\b', re.IGNORECASE)),
('21 Limited', re.compile(r'\b21\s*YE?A?R\b.+Limited', re.IGNORECASE)),
('21', re.compile(r'\b21\s*YE?A?R\b', re.IGNORECASE)),
('30', re.compile(r'\b30\s*YE?A?R\b', re.IGNORECASE)),
]
}
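# each winery above maps to an ordered list of (label, regex) pairs; the first
# regex that matches rec[fldWine] supplies the liquor label (see findLiquor).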
# regex to expand optional values in the optional-value lists to find a match against the wine fld
wineAbbrLookup = {
'120-80' : r'\bOne\s+Twenty\s+Over\s+Eighty\b',
'3Amigos' : r'\bThree\s+Amigos\b',
'3Palms' : r'\bThree\s+Palms\b',
'3Sister' : r'\bThree\s+Sisters?\b',
'4Barrell' : r'\b4[\-\s]Barrels?\b',
'Alex' : r'\bAlexander\b',
'And' : r'\bAnderson\b',
'Car' : r'\bCarneros\b',
'Carries' : r'\bCarrie',
'CC' : r'\bC\.?C\.?\s+Ranch\b',
'Clone4' : r'\bClone\s+4\b',
'Clone6' : r'\bClone\s+6\b',
'Crossbarn' : r'\bCross\s+Barn\b',
'Donna' : r'\bDonna',
'Est' : r'\bEstate\b',
'Estate' : r'\bEst\b',
'Gap' : r'\bGap|\s%27Gap',
'Gary' : r'\bGary',
'Julia' : r'\bJulia',
'Knights' : r'\bKnight',
'KistlerVnyd' : r'\bKistler (Vineyard|VYD|EST)\b',
'LP' : r'\bLes Pierres\b',
'Lyn' : r'\bLyndenhur?st\b',
'Mont' : r'\bMonterey\b',
'Mt' : r'\bMount\b|\bMt\.\b',
'Napa/Son' : r'\bNapa.*Son',
'Oak' : r'\bOakville\b',
'One-Pt-5' : r'\bOne\s+Point\s+Five\b',
'Pomm' : r'\bPommeraie\b',
'Priv' : r'\bPrivate\b',
'RR' : r'\bRussian\s+Rivers?\b|RRV',
'RRR' : r'\bRussian\s+Rivers?\b|RRV',
'Res' : r'\bReserve\b|\bRsv\b|\bResrv\b|\bReserv\b|\bReserve$',
'Rose' : r'\bRosé|\bROSÉ|\bRos%E9',
'Ruth' : r'\bRutherford\b',
'Sandy' : r'\bSandy',
'Samanthas' : r'\bSamantha',
'SC' : r'\bSanta\s+Cruz\b',
'SLD' : r'\bStag.*Leap\b',
'SLH' : r'\bSanta\s+Lucia\b',
'SMV' : r'\bSanta\s+Maria|\bS\s+Maria',
'SRH' : r'\bSTA\.?|\bSANTA\s+Rita\b|\bSTA\sRITA\sHILLS|\bS\s+RITA\b',
'SS' : r'\bSpecial\s+Selection\b',
'Stage' : r'\bStagecoach\b',
'Son' : r'\bSonoma\b',
'SYV' : r'\bSanta\s+Ynez\s+Valley\b',
'TD9' : r'\bTD\s+9\b|\bTD-9\b',
'Terraces' : r'\bTerrace',
'TheCutrer' : r'\bThe Cutrer\b|nnay Cutrer\b',
'Tok' : r'\bTo[\s\-]?Kolan|\bTo[\s\-]?Kalon',
'Turn4' : r'\bTurn\s+4\b',
'Vernas' : r'\bVerna',
'Vine' : r'\bVines\b',
'Yount' : r'\bYountville\b',
'ZThree' : r'\bZ.*\bThree\b',
'ZCuvee' : r'\bZ.*\bCuvee\b|\bCuvee Z\b',
# misspellings
'Agustina' : r'\bAugustina\b',
'Durell' : r'\bDurrell\b',
'Benchland' : r'\bBenchlands\b',
'Pritchard' : r'\bPitchard\b',
}
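# illustrative expansion (comment-only sketch): when the master file holds the
# abbreviation 'RR', the regex r'\bRussian\s+Rivers?\b|RRV' is also tried
# against the wine string, so 'Russian River' and 'RRV' both count as 'RR'.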
# regex search - detect "(ships as ...)" text so it is not mistaken for a bottle size
reShipsAs = re.compile(r'\(ships?\s', re.IGNORECASE)
# the order in which we pull multiple single match attributes
defaultorderlist=[['Tok'], ['Oak'], ['Res'], ['RR'], ['Landslide'], ['Yount'], ['RRR'], ['Son'], ['Ruth'], ['Napa'], ['Helena'], ['SRH'], ['SLH'], ['SMV'], ['SLD'], ['Paso'], ['Alex'], ['Single'], ['Estate']]
### FUNCTIONS ############################################
#########################################################################################
def globalVariableCheck( debug=False ):
# check for liquor definitions that are in noGrapeLookup
# these will never execute
for liquor in liquorLookup:
if liquor in noGrapeLookup:
print('WARNING:liquorLookup regexs will never execute - they are in noGrapeLookup:', liquor)
if liquor in ignoreGrapeLookup:
print('WARNING:liquorLookup regexs will never execute - they are in ignoreGrapeLookup:', liquor)
for winery in ignoreGrapeLookup:
if winery in noGrapeLookup:
print('WARNING:ignoreGrapeLookup regexs will never execute - they are in noGrapeLookup:', winery)
#########################################################################################
def setOptionDictMasterFldValues( optiondict, debug=False ):
# default these fields to the fld values if they are not set
# otherwise leave them alone
for fld in ('fldWine', 'fldWineDescr'):
if not optiondict[fld+'Master']:
optiondict[fld+'Master'] = optiondict[fld]
#########################################################################################
# having a list of names to look at and match on - see if this record has a match
# nameLookup - list of names could have 'None' as the last value, or just the value of None
# lookupStr - string to be searched
# other - array of strings from which the matching name will be removed
# msg - string defining who called this function
#
# returns: string - if a matching string is found
# None - did not find a match
# '' - valid match with "None"
#
def wineLookupByName( nameLookup, lookupStr, other, msg, wineAbbrLookup=None, debug=False ):
# string for debugging messages
funcname = 'wineLookupByName:' + msg + ':'
# debugging
if debug: print(funcname + 'nameLookup:', nameLookup)
# if the value for this winery is None - then there is no additional work, we are done
if nameLookup is None:
# no additional processing
# debugging
if debug: print(funcname + 'match: value is none - continue on')
# return empty string
return ''
# there are additional lookups for this winery - not using grape as part of the description
# check each of the things to look up
for name in nameLookup:
# debugging
if debug: print(funcname + 'match-name:', name)
# special processing of a lookup value of none
if name is None:
# Lookup on none - means just use what we found
# debugging
if debug: print(funcname + 'name-matched: value is none - continue on:pass back blank')
# stop iterating on nameLookup - by returning empty string
return ''
# we have not encountered 'None' - so build the regex based on the text provided
reName = re.compile( r'\b'+name+r'\b', re.IGNORECASE)
# check to see if we have a match with this regex
if reName.search(lookupStr):
# we have a match - so this is the additional attribute we are looking for
# debugging
if debug: print(funcname+'name-MATCHED:', name)
# remove from other if it is in there
for val in other:
if reName.search(val):
other.remove(val)
# debugging
if debug: print(funcname + 'name-remove-from-other:', val)
# stop iterating on nameLookup - return what we found
return name
# 2nd check - see if we have a translation and this name is translatable
if wineAbbrLookup and name in wineAbbrLookup:
# build the regex with the look up value
reName = re.compile(wineAbbrLookup[name], re.IGNORECASE)
# debugging
if debug: print(funcname + 'Abbr-match-name:', name)
# check to see if we have a match with this regex
if reName.search(lookupStr):
# we have a match - so this is the additional attribute we are looking for
# debugging
if debug: print(funcname+'Abbr-name-MATCHED:', wineAbbrLookup[name])
# remove from other if it is in there
for val in other:
if reName.search(val):
other.remove(val)
# debugging
if debug: print(funcname + 'name-remove-from-other:', val)
# stop iterating on nameLookup - return what we found
return name
# checked all the nameLookup entries - and did not find any matches
# debugging
if debug: print(funcname + 'name match not found:set to blank')
# return none meaning we did not find a match
return None
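# illustrative calls (comment-only sketch):
#   wineLookupByName(['Insignia', None], 'Phelps Insignia 2012', [], 'demo') -> 'Insignia'
#   wineLookupByName(None, 'anything', [], 'demo') -> ''   (None short-circuits)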
#########################################################################################
# find the qualifier like gift, etch, glass tied to this string
#
#
#
# returns: first qualifier or None
#
def findQualifier( wine, debug=False ):
for (val, reSearch) in reQualLookup:
if reSearch.search(wine):
if debug: print('findQualifier:matched-returning:', val)
return val
if debug: print('findQualifier:no-match-returning:', None)
return None
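# illustrative call (comment-only sketch):
#   findQualifier('Silver Oak Cab w/ Gift Box') -> 'Gift'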
#########################################################################################
# find the winery tied to the rec
#
# Global Variable Used: wineryLookup (an array of regex that define the winery)
#
# returns: (winery, reWinery)
#
def findWinery( rec, lastWinery, lastReWinery, fldWine, debug=False ):
# if we had a prior winery - test for this match first
if lastWinery:
# debugging
if debug:
try:
print('fw:new winery:', rec[fldWine])
except Exception as e:
print('debug error8-continuing:', str(e))
print('rec[fldWine]:type:', type(rec[fldWine]))
# print('fw:new winery:', rec[fldWine].decode('windows-1252'))
print('fw:checking if this is lastWinery:', lastWinery)
# check to see if the winery is a match again for this record
if lastReWinery.search(rec[fldWine]):
# debugging
if debug: print('fw:this matches the last winery')
# match again - return values
return(lastWinery, lastReWinery)
else:
# not match - debugging
if debug: print('fw:not last winery')
# if we did not match lastWinery - lets look through the list
# go through the list of wineries (global variable),
# each row contains wineryName, wineryRegex
# pulling out the tuple from the lookup
for (winery, reWinery) in wineryLookup:
# debugging
if debug: print('fw:not lastWinery-checking winery:', winery)
if fldWine not in rec:
print('not a column in this record fldWine:', fldWine)
print('rec:', rec)
# check to see if this winery is a match
if reWinery.search(rec[fldWine]):
# debugging
if debug: print('fw:winery match found:', winery)
# this is a match - set the variables
return (winery, reWinery)
# for loop ends without a match
# did not find a matching winery in the for loop - clear values
return (None, None)
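# illustrative call (comment-only sketch):
#   findWinery({'wine': '2014 Caymus Napa Cab'}, None, None, 'wine')
#   -> ('Caymus', <compiled regex>)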
#########################################################################################
# find the liquor tied to the rec, leveraging the winery
# Global Variable Used: liquorLookup
#
# returns: (liquor, reLiquor)
#
def findLiquor( rec, winery, fldWine, debug=False ):
# go through the list of liquors (global variable), pulling out the tuple from the lookup
for (liquor, reLiquor) in liquorLookup[winery]:
# debugging
if debug: print('fl:checking liquor:', liquor)
# check to see if this liquor is a match
if reLiquor.search(rec[fldWine]):
# debugging
if debug: print('fl:liquor match found:', liquor)
# this is a match - set the variables
return (liquor, reLiquor)
# for loop ends without a match
# did not find a matching liquor in the for loop - clear values
return (None, None)
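# illustrative call (comment-only sketch):
#   findLiquor({'wine': 'Macallan 12 YR Double Cask'}, 'zScotch Macallan', 'wine')
#   -> ('12 Double', <compiled regex>)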
#########################################################################################
# find the grape tied to the rec by regex evaluation
#
# Global Variable Used: grapeLookup
#
# returns: (grape, reGrape)
#
def findGrapeByRegex( rec, fldWine, debug=False ):
# go through the list of grapes (global variable), pulling out the tuple from the lookup
for (grape, reGrape) in grapeLookup:
# debugging
if debug: print('fgbr:grape:', grape)
# check to see if this grape is a match
if grape is not None and reGrape.search(rec[fldWine]):
# debugging
if debug: print('fgbr:grape match found:', grape)
# this is a match - set the variables
return (grape, reGrape)
# for loop ends without a match
# did not find a matching grape in the for loop - clear values
return (None, None)
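# illustrative call (comment-only sketch):
#   findGrapeByRegex({'wine': 'Kistler Chardonnay Sonoma'}, 'wine')
#   -> ('Chard', <compiled regex>)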
#########################################################################################
# find a string in a field of a record using string match and
# on match, return that it matched and the remainder of the string as an array
#
# returns: (findStr, other)
#
def findStrInRecReturnOther( rec, fldWineDescr, findStr, debug=False ):
# find where in the string this findStr is positioned
matchLoc = rec[fldWineDescr].find(findStr)
# if we found a location
if matchLoc > -1:
# then strip everything to the left of the findStr value and then split this to create other attributes
other = rec[fldWineDescr][matchLoc+len(findStr)+1:].split()
# debugging
if debug: print('fsirro:findStr matched:', findStr)
if debug: print('fsirro:findStr other:', other)
# return what we found
return (findStr, other)
#no match found - debugging
if debug: print('fsirro:findStr did not match using:', findStr)
# did not find a matching findStr - return that fact
return (None, [])
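# illustrative call (comment-only sketch):
#   findStrInRecReturnOther({'winedescr': 'Kistler Chard Sonoma 2014'}, 'winedescr', 'Chard')
#   -> ('Chard', ['Sonoma', '2014'])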
#########################################################################################
# find the grape tied to the rec and the list of other attributes
# to the right of the grape in that description
#
# Global Variable Used: grapeLookup
#
# returns: (grape, other)
#
def findGrapeByStr( rec, fldWineDescr, debug=False ):
# find the grape and strip everything right of that from the fldWineDescr field
for (grape,reGrape) in grapeLookup:
# debugging
if debug: print('fg:grape:', grape)
# find where in the string this grape is positioned
(grape, other) = findStrInRecReturnOther( rec, fldWineDescr, grape, debug=debug)
# if we have a match return that match
if grape:
return (grape, other)
# did not find a matching grape - return that fact
return (None, [])
#########################################################################################
# find the vintage tied to the rec
#
# Global Variable Used: vintageLookup
#
# returns: vintage
#
def findVintage( rec, fldWine, debug=False ):
# loop through the vintage lookup records
for reVintage in vintageLookup:
# search for match
m = reVintage.search(rec[fldWine])
# if there is a match
if m:
# extract the value from the first regex group with a value
if m.group(1):
vintage = m.group(1)
if debug: print('fv:vintage-match:', reVintage,':group1')
elif m.group(2):
vintage = m.group(2)
if debug: print('fv:vintage-match:', reVintage,':group2')
elif m.group(3):
vintage = m.group(3)
if debug: print('fv:vintage-match:', reVintage,':group3')
else:
vintage = m.group(4)
if debug: print('fv:vintage-match:', reVintage,':group4')
# return what we found
return vintage
# did not find it
return None
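# illustrative call (comment-only sketch):
#   findVintage({'wine': 'Caymus 2014 Cab'}, 'wine') -> '14'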
#########################################################################################
# Create the winery/grape-wine-liquor conversion table based on the
# array of records passed in
#
# this routine takes the already read in list of definitions and parses them up
# in order to create a winery-wine-attributes file - that will be used
# later to take new records from searching the internet and properly assign
# an aligned/consistent wine description to that wine string
#
# we expect the wines array to have attributes: fldWineDescr (winedescr), and fldWine (wine_name)
#
# returns: wgLookup - dictionary - which is built from parsing winedescr NOT wine_name
#
# wgLookup[winery][grape] = list of lists of attributes to perform lookups with
#
def buildWineryGrapeLookup( wines, fldWineDescr='winedescr', fldWine='wine', debug=False ):
# local variables
wgLookup = {}
lastWinery = None
lastReWinery = None
# step through the records read in
for rec in wines:
# debugging
if debug: print('bwgl:new rec:', rec[fldWineDescr])
# set the variable
if not fldWineDescr in rec:
print('creating-field:', fldWineDescr)
rec[fldWineDescr] = ''
# local loop variables
winery = grape = wine = liquor = None
other = []
### WINERY
(lastWinery, lastReWinery) = (winery, reWinery) = findWinery( rec, lastWinery, lastReWinery, fldWine, debug=debug )
# if we did not find the winery - skip this record
if not winery:
# debugging
if debug: print('bwgl:did not find winery-skipping:', rec[fldWine])
# don't process this record - get the next record to process
continue
### IGNOREGRAPE and NOGRAPE and LIQUOR
# if this winery has a noGrapeLookup option - use that to split up the record
if winery in ignoreGrapeLookup:
### BLANK WINE
# don't get the grape for this winery
# set wine to blank
wine = ''
# debugging
if debug: print('bwgl:wine check ignoreGrapeLookup on winery:', winery)
elif winery in noGrapeLookup:
### NO GRAPE WINE -- fldWineDescr
# debugging
if debug: print('bwgl:wine check noGrapeLookup on winery:', winery)
# find which wine is a match from the noGrapeLookup
wine = wineLookupByName( noGrapeLookup[winery], rec[fldWineDescr], [], 'noGrapeLookup', debug=debug )
# no match - we want to continue to have the wine as blank
# (this branch is intentionally disabled via the False guard)
if False and wine == '':
# debugging
if debug: print('bwgl:nograpelookup:no-match:set wine to None')
wine = None
elif winery in liquorLookup:
### LIQUOR ---- fldWine
# debugging
if debug: print('bwgl:liquor check on winery:', winery)
# see if a liquor matches
(liquor, reLiquor) = findLiquor( rec, winery, fldWine, debug=debug )
# if we found match - populate wine so we don't look for grape
if liquor is not None:
wine = liquor
# debugging
if debug: print('bwgl:liquor found and put in wine:', wine)
### GRAPE (if we have not filled in wine) --- fldWineDescr
if wine is None:
# debugging
if debug: print('bwgl:grape check because wine is None')
# determine if there is a grape in this string
# if there is - other will hold the attributes found to the right of the grape
(grape,other) = findGrapeByStr( rec, fldWineDescr )
# debugging
if debug: print('bwgl:grape:', grape, ':other:', other)
else:
# debugging
if debug: print('bwgl:grape check skipped - we have a wine')
### Skip this record if we don't have a wine or a grape
if wine is None and grape is None:
# debugging
if debug: print('bwgl:record skipped - no grape or wine defined')
continue
### OTHER (if not already created by grape lookup) ---- fldWineDescr
#
# if we did not find the grape in the string
# so other was not populated
# we need to look up other using 'winery' as the filter
if grape is None:
# debugging
if debug: print('bwgl:build other from winery')
# find where in the string this grape is positioned
(wineryFind, other) = findStrInRecReturnOther( rec, fldWineDescr, winery, debug=debug)
### OTHER Additional Processing
# remove CASE - the keyword case if it exists
if 'case' in other:
other.remove('case')
# debugging
if debug: print('bwgl:remove case from other')
# remove VINTAGE and/or BOTTLESIZE and/or other QUALIFIERS
# the last element will either be the vintage (no bottle size)
# or will be the bottle size and then next is the vintage
# if the last position is not vintage, attempt to remove the bottle size
# then remove vintage - this should be the vintage (validated by isdigit lookup)
if other:
if debug: print('bwgl:looking at other for quals, bottlesize and vintage')
# remove qualifiers if exist
if not other[-1].isdigit():
# first we check to see if there is a qualifier appended
# the last position is not a vintage - see if it is a qualifier
for qual,reQual in reQualLookup:
if qual == other[-1]:
if debug: print('bwgl:remove qualifier from other:', qual)
del other[-1]
break
# remove bottle size if exist
if other and not other[-1].isdigit():
# the last position is not a vintage - see if it is a bottle size
for size,reSize in sizeLookup:
if size == other[-1]:
if debug: print('bwgl:remove bottlesize from other:', size)
del other[-1]
break
# remove vintage if it is there
if other and other[-1].isdigit():
# first check to see if this is part of the ignore grape solution
if winery in ignoreGrapeLookup and ignoreGrapeLookup[winery] and other[-1] in ignoreGrapeLookup[winery]:
if debug: print('bwgl:value is in ignoreGrapeLookup - keeping it:', other[-1])
else:
# debugging
if debug: print('bwgl:remove vintage from other:', other[-1])
del other[-1]
# remove WINE - the element if the element is the same as the wine
if wine and wine in other:
other.remove(wine)
# debugging
if debug: print('bwgl:remove wine from other:', wine)
# debugging
if debug:
try:
print('bwgl:Final-Build:', winery, ':', grape, ':', wine, ':', liquor, ':', other, ':', rec[fldWineDescr], ':', rec[fldWine])
except Exception as e:
print('debug error2-continuing:', str(e))
print('fldWine:', fldWine)
### BUILD LOOKUP FOR CONVERSION (we use the grape attribute to build the dictionary)
# move liquor value into grape because we did not find the grape
if grape is None and wine is not None:
grape = wine
# debugging
if debug: print('bwgl:set-grape-to-wine:', grape)
### WINERY:GRAPE-WINE-LIQUOR Dictionary creation
# debugging
if debug: print('bwgl:create wgLookup for winery:', winery, ':grape:', grape)
# validate we have an entry for this winery in the lookup dict
if winery not in wgLookup:
# one does not exist - so create a stub for winery:grape
wgLookup[winery] = { grape : [] }
else:
# one DOES exist - check to see if the grape is already here
if grape not in wgLookup[winery]:
# grape is not here - so create an empty list to stuff values into
wgLookup[winery][grape] = []
# check to see if we have OTHER attributes
# and if we do - check to see that this list of attributes
# is not already in the wineLookup array
# and if this list does not exist - then append this list
if other and other not in wgLookup[winery][grape]:
# add this list of other to this entry
wgLookup[winery][grape].append(other)
# debugging
if debug: print('bwgl:appending to wgLookup:other:', other)
# end loop on wines
### SORTED WINERY:GRAPE lookup - most optional attributes first in the list
# debugging
if debug: print('bwgl:complete-read-of-master-file:sort wgLookup')
# now sort the list of lookups from most specific (greatest number of attributes) to least
for winery in wgLookup:
for grape in wgLookup[winery]:
wgLookup[winery][grape] = sorted(wgLookup[winery][grape], key=len, reverse=True)
# debugging
if debug:
print('\n'*5)
print('START WGLOOKUP DUMPED')
print('#'*80)
if ppFlag:
pp.pprint(wgLookup)
else:
print('bwgl:final-wgLookup:\n', wgLookup)
print('#'*80)
# done with for loop - return the lookup
return wgLookup
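# illustrative usage sketch (readcsv2list is an assumed kvcsv reader name; the
# real entry point lives in the project's kvcsv module):
#   wines = kvcsv.readcsv2list('wine_xref.csv')
#   wgLookup = buildWineryGrapeLookup(wines, fldWineDescr='winedescr', fldWine='wine')
#   wgLookup['Caymus'] might then look like: {'Cab': [['Special', 'Select'], []]}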
#########################################################################################
# find the matching set of additional attributes that match this record
# from the global lookup.
#
# we assume that we have already tested that winery and value exist in wgLookup prior to calling this routine
#
# the special parameters here are:
# value - this is either "wine" or "grape" - this routine allows you to lookup on different attributes
# valueDescr - passed in string for debugging telling us which value was passed in
#
# defaultorderlist = array of array of string - gives the default order of singlematch lookups to determine which of
# many matches is the one we will select
#
# Global Variable Used: wgLookup
#
# returns: valuematchset array selected
#
def findAddAttribWgLookup( rec, winery, value, fldWine, AbbrLookup=[], defaultorderlist=None, valueDescr='', debug=False ):
# local variable - capture all the entries that are single match entries
singlematch=[]
# debugging
if debug:
try:
print('faawl:value:', valueDescr, ':match-wgLookup:', rec[fldWine], ':', wgLookup[winery][value])
except Exception as e:
print('debug error7-continuing:', str(e))
print('fldWine:', fldWine)
# for each set of values that could be a match
for valuematchset in wgLookup[winery][value]:
# debugging
if debug: print('faawl:testing valuematchset:', valuematchset, ':length:', len(valuematchset))
# set the flag to start
allmatch = True
# loop through the set of values that make up this set
for valuematch in valuematchset:
# for each entry - build a regex and test it and add it up
# we need all values in this valueset to be true for this valueset to be match
reMatch1 = re.compile(r'\b'+valuematch+r'\b', re.IGNORECASE)
reMatch2 = re.compile(r'\s'+valuematch+r'\s', re.IGNORECASE)
# check to see if this regex is a match
m1 = reMatch1.search(rec[fldWine])
m2 = reMatch2.search(rec[fldWine])
if m1 or m2:
# this regex is a match
allmatch = True and allmatch
elif valuematch in AbbrLookup:
# this regex was not a match - but we want to check if the value also has
# a translation - and if it has a translation - then we test the translation also
# the value did not work but there is an alternate value to check
# debugging
if debug: print('faawl:valuematch-abbr:', valuematch, ':', wineAbbrLookup[valuematch])
# create the regex
reMatch = re.compile(wineAbbrLookup[valuematch], re.IGNORECASE)
# test the regex and attach the results to allmatch
allmatch = reMatch.search(rec[fldWine]) and allmatch
else:
# not a match - update allmatch
allmatch = False and allmatch
# debugging
if debug: print('faawl:valuematch:', valuematch, ':allmatch:', allmatch)
# check to see if all matched
if allmatch:
# all matched - so this is a match - so break out of the valuematchset group
# debugging
if debug: print('faawl:value matched:', valuematchset)
# different action based on # of items being match
if len(valuematchset) == 1:
# debugging
if debug: print('faawl:single-valuematch-set-added-to-singlematch:', valuematchset)
# single value matching - we don't stop when we find a match
singlematch.append(valuematchset)
else:
# debugging
if debug: print('faawl:multivalue-valuematch-set-found:done')
# multi value match so we are done when we find a match - so return
return valuematchset
# did not find matchset in the for loop - check to see if we have singlematch
if not singlematch:
# debugging
if debug: print('faawl:exit with singlematch NOT populated return blank')
# did not have singlematch found - we are done - return empty
return []
# singlematch populated
# debugging
if debug: print('faawl:exit with singlematch populated:', singlematch)
# check to see how many matches we got
if len(singlematch) == 1 or not defaultorderlist:
# debugging
if debug: print('faawl:return first entry in singlematch:', singlematch[0])
# if there is only one entry in here
# or we don't have a default order so we pick the first found
# and we set the value to this
return singlematch[0]
# we need to define which of the singlematch values we will return
# the defaultorderlist will be used to set that ordering
#
# create a local copy of the list that can be changed in this routine
defaultorder = defaultorderlist[:]
# multiple singlematch values so lets find and pick the best one
# debugging
if debug: print('faawl:multiple single match value-singlematch:', singlematch)
# get the values from singlematch that are not in defaultorder
# and put them at the start of defaultorder list
# go in reverse order when doing this lookup
for val in singlematch[::-1]:
if val not in defaultorder:
defaultorder.insert(0,val)
### HARDCODED ###
# very short term fix - we need to prioritize these single tags (mondavi problem)
if winery == 'Mondavi' and ['Tok'] in singlematch:
if debug: print('faawl:Change from:', valuematchset, ':to Tok for mondavi')
return ['Tok']
# find the first matching value from priority order list
for val in defaultorder:
if val in singlematch:
# debugging
if debug: print('faawl:selected-singlematch-value:', val)
# we found the first match - set it and break out
return val
# debugging
if debug: print('faawl:valuematchset-empty')
# did not match - return empty
return []
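# illustrative call (comment-only sketch, assuming wgLookup['Kistler']['Chard']
# contains [['KistlerVnyd'], ['Sonoma']]):
#   findAddAttribWgLookup(rec, 'Kistler', 'Chard', 'wine', wineAbbrLookup, defaultorderlist)
#   returns the attribute list whose terms all appear in rec['wine'].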
#########################################################################################
# create a consistent wine name for a list or records with store based wine descriptions
#
# the special parameters here are:
# wgLookup - dictionary of winery, wine, list of wines
# wines - list of records to be processed
#
# Global Variable Used: ignoreGrapeLookup, noGrapeLookup, wineAbbrLookup, liquorLookup
# reCase, sizeLookup
#
# returns: [updated values in the wines array]
#
#### Use the winery/grape-wine-liquor conversion table to define a wine description for the records
def setWineryDescrFromWineryGrapeLookup( wgLookup, wines, fldWineDescr = 'winedescr', fldWine = 'wine', fldWineDescrNew = 'winedescrnew', fldWineDescrMatch=False, debug=False ):
if debug:
print('\n'*10,'START WINEDESCR SETTING HERE ---------------------------------------------')
# step through all the records passed in
for rec in wines:
# local variables
winery = grape = wine = vintage = case = size = liquor = nongrape = qual = None
winematchset, grapematchset = [], []
# debugging
if debug:
try:
print('setWinery:fldWine:', rec[fldWine])
except Exception as e:
print('debug error2-continuing:', str(e))
print('fldWine:', fldWine)
# make the field if it does not exist
if fldWineDescrNew not in rec:
rec[fldWineDescrNew] = rec[fldWineDescr]
### WINERY
(winery, reWinery) = findWinery( rec, None, None, fldWine, debug=debug )
# validate the winery
if winery is None:
### WINERY NONE - go to next record
# debugging
if debug: print('setWinery:winery not found-next record:' + rec[fldWine])
# get the next record
continue
elif winery not in wgLookup:
### WINERY NOT IN LOOKUP
# skip this record - nothing to process
# debugging
if debug: print('setWinery:winery not in wgLookup:', winery)
continue
### GRAPE
# find the grape that is this record
(grape, reGrape) = findGrapeByRegex( rec, fldWine, debug=debug )
# debugging
if debug: print('setWinery:grape found:', grape)
### OVERRIDES
if winery in ignoreGrapeLookup:
### IGNORE GRAPE
# debugging
if debug: print('setWinery:winery-match-ignoreGrape:clear-wine:set-grape-to-None:set-nongrape-True:winery:', winery)
# clear wine and grape
wine = ''
# clear the grape field
grape = None
# set the liquor flag to control processing
nongrape = True
if winery in noGrapeLookup:
### NOGRAPE - WINE
# debugging
if debug: print('setWinery:noGrapeLookup wine check:', winery)
# do the lookup and if a search is a match on None take appropriate action
wine = wineLookupByName( noGrapeLookup[winery], rec[fldWine], [], 'noGrapeLookup', wineAbbrLookup, debug=debug )
# debugging
if debug: print('setWinery:nogrape check:wine:', wine)
# test the value we got back
if wine == '':
# debugging
if debug: print('setWinery:noGrapeLookup:matched:None::clear grape:set nongrape to True')
# the lookup match None - so we want to ignore any grape found and we blank out the wine
grape = None
wine = ''
nongrape = True
elif wine:
# matched a wine - so clear the grape value
grape = None
# debugging
if debug: print('setWinery:nograpeLookup:wine found - clear grape field')
if wine is None and winery in liquorLookup:
### LIQUOR
# debugging
if debug: print('setWinery:liquorLookup:', winery)
(liquor, reLiquor) = findLiquor( rec, winery, fldWine, debug=debug)
# if we found something update wine to be what we found
if liquor is not None:
wine = liquor
# debugging
if debug: print('setWinery:liquorLookup-match:', liquor)
if not grape and not nongrape and not wine and liquor is None:
# NO GRAPE - and not connected to noGrapeLookup or liquorLookup
# get the next record
# debugging
if debug: print('setWinery:did not find grape-skipping record:', rec[fldWineDescr])
continue
# debugging
if debug: print('setWinery:pre-vintage found values for wine/liquor:', wine, ':grape:', grape)
### VINTAGE
vintage = findVintage( rec, fldWine, debug=debug )
# debugging
if debug: print('setWinery:vintage:', vintage)
### CASE information
if reCase.search(rec[fldWine]):
case = 'case'
### BOTTLE SIZE - get the size information
for (size, reSize) in sizeLookup:
# debugging
if debug: print('setWinery:sizeLookup:',size)
if reSize.search(rec[fldWine]) and not reShipsAs.search(rec[fldWine]):
# debugging
if debug: print('setWinery:sizeLookup:matched:',reSize)
break
else:
size = None
if debug: print('setWinery:sizeLookup:None-found')
### QUAL for this wine
qual = findQualifier(rec[fldWine], debug=debug)
# debugging
if debug:
try:
print('setWinery:FinalAttributes:', winery, ':', grape, ':', wine, ':', liquor, ':', vintage, ':', case, ':', size, ':', qual, ':', rec[fldWine])
except Exception as e:
print('debug error5-continuing:', str(e))
print('fldWine:', fldWine)
### WINE - ADDITIONAL INFORMATION
if liquor is not None:
# debugging
if debug: print('setWinery:liquor flag set - no additional data needs to be collected')
elif wine is not None:
# debugging
if debug: print('setWinery:wine is not None - do additional lookups:wine:', wine)
# we found a wine / liquor - so see if there are additional attributes
if wine in wgLookup[winery] and wgLookup[winery][wine]:
# debugging
if debug: print('setWinery:lookup winematchset')
# there is one or more additional lookups for this winery/wine
winematchset = findAddAttribWgLookup( rec, winery, wine, fldWine, wineAbbrLookup, None, valueDescr='wine', debug=debug )
else:
# wine not in wgLookup so nothing to work with
print('setWinery:unable to perform wgLookup on winery:', winery, ':wine:', wine, ':rec-wine:', rec[fldWine])
# debugging
if debug:
try:
print('wgLookup[winery]:', wgLookup[winery])
except Exception as e:
print('debug error3-continuing:', str(e))
print('winery:', winery)
# debugging - wine is not None - what is the final winematchset
if debug: print('setWinery:winematchset:', winematchset)
elif grape is not None:
# debugging
if debug: print('setWinery:grape is not None - do additional lookups:', grape)
# grape was returned (not wine) so do the lookup on grape
if grape in wgLookup[winery] and wgLookup[winery][grape]:
# see if we can create a match based on attributes and the grape
grapematchset = findAddAttribWgLookup( rec, winery, grape, fldWine, wineAbbrLookup, defaultorderlist, valueDescr='grape', debug=debug )
elif grape in wgLookup[winery]:
# do nothing this is a empty set
if debug: print('setWinery:grape match: matching record set is blank - no action required')
else:
# grape not in wgLookup so nothing to work with
# debugging
print('setWinery:grape NONMATCH:', rec[fldWine])
if debug: print('setWinery:liquor:', liquor, ':wine:', wine, ':grape:', grape, ':wgLookup[winery]:', wgLookup[winery])
# debugging - wine is not None - what is the final grapematchset
if debug: print('setWinery:grapematchset:', grapematchset)
### check the matchsets we got back - if any of them look like vintage values
### remove them from the string and look up the vintage again
if vintage:
newVintageLookupWine = rec[fldWine]
for matchvalue in winematchset:
if vintage in matchvalue:
newVintageLookupWine = newVintageLookupWine.replace(matchvalue,'')
if debug: print('setWinery:2nd-vintage:winematchset:wine-name-removal:', matchvalue)
for matchvalue in grapematchset:
if vintage in matchvalue:
newVintageLookupWine = newVintageLookupWine.replace(matchvalue,'')
if debug: print('setWinery:2nd-vintage:grapematchset:wine-name-removal:', matchvalue)
if newVintageLookupWine != rec[fldWine]:
if debug: print('setWinery:2nd-vintage:newVintageLookupWine:', newVintageLookupWine)
newVintage = findVintage( { fldWine : newVintageLookupWine}, fldWine, debug=debug )
if debug: print('setWinery:2nd-vintage:newVintage:', newVintage)
vintage = newVintage
### FINAL WINEDESCR
# create initial value
wineDescr = ''
# if winery starts with a z then we don't have a vintage
if winery.startswith('z'):
vintage = None
# debugging
if debug: print('setWinery:winery starts with z: clear vintage')
# quick test - is the winematchset text already contained in the wine value
if winematchset and ' '.join(winematchset) in wine:
#debugging
if debug: print('setWinery:clearing-winematchset:', winematchset,':is-in-wine:', wine)
winematchset = []
if grapematchset and ' '.join(grapematchset) in grape:
#TODO - work around for single letter matches
if not (len(grapematchset)==1 and len(grapematchset[0])==1):
#debugging
if debug: print('setWinery:clearing-grapematchset:',grapematchset,':is-in-grape:', grape)
grapematchset = []
if grapematchset and size and size in ' '.join(grapematchset):
size = ''
if winematchset and size and size in ' '.join(winematchset):
size = ''
if debug:
print('setWinery:vallist1:', [winery, grape, wine] + grapematchset + winematchset + [vintage, size, qual, case])
print('setWinery:vallist2:', [winery, grape, wine, *grapematchset, *winematchset, vintage, size, qual, case])
# create a list
wdList= []
# step through the values
for val in [winery, grape, wine] + grapematchset + winematchset + [vintage, size, qual, case]:
# and if there is a value add to the list - otherwise skip
if val: wdList.append(val)
# build the wine description by joining all these values together
wineDescr = ' '.join(wdList)
# debugging - block intentionally disabled
if False:
if debug: print('setWinery:wdList:', wdList)
if debug: print('setWinery:wineDescr:', wineDescr)
# debugging
if debug:
try:
print(':'.join(['setWinery:wineDescrList', wineDescr, rec[fldWineDescr], str(wineDescr==rec[fldWineDescr]), rec[fldWine]]) )
except Exception as e:
print('debug error6-continuing:', str(e))
print('fldWine:', fldWine)
# fill the new value into the record
rec[fldWineDescrNew] = wineDescr
# fill in the matching field
if fldWineDescrMatch:
rec[fldWineDescrMatch] = (rec[fldWineDescr] == rec[fldWineDescrNew])
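#
# illustrative usage sketch (hypothetical lookup and record values):
#   wgLookup = {'Mondavi': {'Cab': []}}
#   wines    = [{'wine': '2015 Mondavi Cab 750 ML', 'winedescr': ''}]
#   setWineryDescrFromWineryGrapeLookup(wgLookup, wines)
#   each record gains a 'winedescrnew' field assembled from the parsed
#   attributes (winery, grape/wine, vintage, size, qualifier, case)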
#########################################################################################
# set any digit only field to the word passed
def setDigitFld2Value( wines, fld, value, debug=False ):
for rec in wines:
if rec[fld].isdigit():
rec[fld] = value
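#
# example: only records whose field is all digits are overwritten:
#   wines = [{'winedescrnew': '12345'}, {'winedescrnew': 'Mondavi Cab'}]
#   setDigitFld2Value(wines, 'winedescrnew', 'unknown')
#   -> the first record becomes 'unknown', the second is left unchanged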
#########################################################################################
# validate the field settings match the file we read in for update
def updateFileOptionDictCheck( optiondict, wines, header, debug=False ):
# check to see if the description field is in the file we read in
if optiondict['fldWineDescr'] not in wines[0]:
if debug: print('updateFileOptionDictCheck:fldWineDescr NOT in file read in:', optiondict['fldWineDescr'])
# field needed is not in the record - see if we know what to do
if 'cnt' in wines[0]:
# the cnt field is in the file - so set to that structure
# we will put the updated values into the 'cnt' field
print('setting values fldWineDescr and fldWineDescrNew to: cnt')
# change the field we are updating
optiondict['fldWineDescr'] = optiondict['fldWineDescrNew'] = 'cnt'
elif 'winedescr' in wines[0]:
# the WineDescr field is in the file - so set to that structure
print('setting values fldWineDescr to winedescr and fldWineDescrNew to winedescrnew')
# change the field we are updating
optiondict['fldWineDescr'] = 'winedescr'
optiondict['fldWineDescrNew'] = 'winedescrnew'
else:
# no idea - we need to error out
print('could not find fldWineDescr in wines[0]-aborting:', optiondict['fldWineDescr'], '\nwines[0]:', wines[0])
# force a KeyError to abort
error = wines[0][optiondict['fldWineDescr']]
# determine if we should create the match column (may want to remove this section later)
# removed this logic - require the person to set this field - we will not set it for them.
if False and optiondict['fldWineDescr'] == 'winedescr':
# we are using the file format that is the xref file
# so check to see if we have match enabled
if not optiondict['fldWineDescrMatch']:
# create the default value
optiondict['fldWineDescrMatch'] = 'same'
# provide message
print('setting value fldWineDescrMatch to: same')
# check to see if the input file is the same as the output file
if optiondict['csvfile_update_in'] == optiondict['csvfile_update_out']:
# they are the same file (in and out) - so we need to move the input file to a backup location
(file_path, base_filename, file_ext) = kvutil.filename_split(optiondict['csvfile_update_in'])
# create the new filename
backupfile = kvutil.filename_proper( base_filename + optiondict['backupfile_ext'], file_path )
# messaging
print('copying ', optiondict['csvfile_update_in'], ' to ', backupfile)
# copy the input file to the backup filename
shutil.copyfile(optiondict['csvfile_update_in'], backupfile)
# set the output keys we are going to assign
if optiondict['fldWineDescrNew'] == 'cnt':
# output matches the original ref file format with the "cnt" field
optiondict['csvdictkeys'] = ['cnt','date','search','store','wine','winesrt']
elif optiondict['fldWineDescrMatch']:
# output is a modified xref format so you can look at old and new definitions
# optiondict['csvdictkeys'] = [optiondict['fldWineDescr'],optiondict['fldWineDescrNew'],optiondict['fldWineDescrMatch'], 'date','search','company','wine','winesrt']
optiondict['csvdictkeys'] = [optiondict['fldWineDescr'],optiondict['fldWineDescrNew'],optiondict['fldWineDescrMatch'], *header]
else:
# copy over the read in format
optiondict['csvdictkeys'] = [optiondict['fldWineDescrNew']] + header[1:]
# output matches expected input - should really change this to be the format of the read in file
#optiondict['csvdictkeys'] = [optiondict['fldWineDescrNew'], 'date','search','company','wine','winesrt']
print('updateFileOptionDictCheck:set csvdictkeys to:',optiondict['csvdictkeys'])
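#
# illustrative example of the backup behavior above (hypothetical filenames):
#   csvfile_update_in == csvfile_update_out == 'wineref.csv'
#   -> the input is first copied to 'wineref.bak' (backupfile_ext) via
#      shutil.copyfile() so the original data survives the overwrite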
# ---------------------------------------------------------------------------
if __name__ == '__main__':
# capture the command line
optiondict = kvutil.kv_parse_command_line( optiondictconfig, debug=False )
# set the global debug flag
ppFlag = optiondict['pprint']
# set master fields
setOptionDictMasterFldValues( optiondict, debug=False )
### global variable checks ###
if optiondict['setup_check']:
print('Running global variable check')
globalVariableCheck( debug = optiondict['debug'] )
sys.exit()
# messaging
print('reading in master file:', optiondict['csvfile_master_in'])
# read in the MASTER FILE INPUT file
wines,header = kvcsv.readcsv2list_with_header(optiondict['csvfile_master_in'], headerlc=True)
# build the wine lookup dictionary
wgLookup = buildWineryGrapeLookup( wines, optiondict['fldWineDescrMaster'], optiondict['fldWineMaster'], debug=optiondict['debug'] )
# read in the UPDATE FILE INPUT file - if not updating the master file
if optiondict['csvfile_master_in'] != optiondict['csvfile_update_in']:
# messaging
print('reading in update file:', optiondict['csvfile_update_in'])
# read in the INPUT file
wines,header = kvcsv.readcsv2list_with_header(optiondict['csvfile_update_in'], headerlc=True)
# check to see if we read in any records and if not just return
if not wines:
print('wineset.py - no records read in - no work to be done - exiting')
sys.exit()
# test to see if we should set the fields based on what we just read in
updateFileOptionDictCheck( optiondict, wines, header, debug=optiondict['debug'] )
# do the assignment of wines to records
setWineryDescrFromWineryGrapeLookup( wgLookup, wines, optiondict['fldWineDescr'], optiondict['fldWine'], optiondict['fldWineDescrNew'], optiondict['fldWineDescrMatch'], debug=optiondict['debug'] )
# if enabled - set all unassigned new descriptions the default value
if optiondict['defaultnew'] is not None:
# message
print('Setting ', optiondict['fldWineDescrNew'], ' to ', optiondict['defaultnew'], 'if not set')
# do the work
setDigitFld2Value( wines, optiondict['fldWineDescrNew'], optiondict['defaultnew'], debug=optiondict['debug'] )
# save the output to the file of interest
kvcsv.writelist2csv( optiondict['csvfile_update_out'], wines, optiondict['csvdictkeys'] )
# messaging
print('Saved results to:', optiondict['csvfile_update_out'])
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
'''
@author: clarkmatthew
extension of the boto instance class, with added convenience methods + objects
Add common instance test routines to this class
Examples:
from eucaops import Eucaops
from nephoria.windows_instance import WinInstance
tester = Eucaops(credpath='eucarc-10.111.5.80-eucalyptus-sys_admin')
wins = WinInstance.make_euinstance_from_instance(tester.get_instances(idstring='i-89E13DA8')[0], tester=tester, keypair='test')
vol = tester.get_volume(status='available', zone=wins.placement)
wins.attach_volume(vol)
'''
import socket
import os
import re
import time
import copy
import types
import operator
from prettytable import PrettyTable, ALL
from boto.ec2.instance import Instance
from nephoria.aws.ec2.euvolume import EuVolume
from cloud_utils.log_utils import eulogger, get_line, markup
from nephoria.euca.taggedresource import TaggedResource
from boto.ec2.instance import InstanceState
from datetime import datetime
from cloud_utils.net_utils import winrm_connection
termline = get_line()
class WinInstanceDiskType():
gigabyte = 1073741824
megabyte = 1048576
def __init__(self, win_instance, wmic_dict):
self.check_dict_requires(wmic_dict)
self.__dict__ = self.convert_numbers_in_dict(copy.copy(wmic_dict))
self.win_instance = win_instance
self.size_in_gb = self.get_size_in_gb()
self.size_in_mb = self.get_size_in_mb()
self.size = long(self.size or 0)
self.last_updated = time.time()
self.setup()
def setup(self):
raise Exception('Not Implemented')
def check_dict_requires(self, wmic_dict):
raise Exception('Not Implemented')
def convert_numbers_in_dict(self, dict):
#convert strings representing numbers to longs
for key in dict:
value = str(dict[key])
if (re.search("\S", str(dict[key])) and not re.search("\D", str(dict[key]))):
dict[key] = long(dict[key])
return dict
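# example: {'size': '1024', 'name': 'disk0'} -> 'size' becomes long 1024,
# while 'disk0' contains non-digit characters and stays a string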
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def get_size_in_gb(self):
'''
Attempts to convert self.size from bytes to gigabytes, rounding up when the remainder
exceeds .99 of a gigabyte to account for differences in how the size is represented
'''
self.size = int(self.size or 0)
gigs = self.size / self.gigabyte
if (self.size % self.gigabyte) /float(self.gigabyte) > .99:
gigs += 1
return gigs
def get_size_in_mb(self):
'''
Attempts to convert self.size from bytes to megabytes, rounding up when the remainder
exceeds .99 of a megabyte to account for differences in how the size is represented
'''
self.size = int(self.size or 0)
mb = self.size / self.megabyte
if (self.size % self.megabyte) /float(self.megabyte) > .99:
mb += 1
return mb
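# example of the round-up above: a disk of 1073741823 bytes (one byte short
# of a gigabyte) has a remainder ratio of ~0.999 > .99, so get_size_in_gb()
# reports 1 rather than 0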
def print_self(self):
self.get_summary(printmethod=self.win_instance.debug)
def get_summary(self, printheader=True, printmethod=None):
raise Exception('Method not implemented')
def print_self_full(self, printmethod=None):
'''
formats and prints self.dict
'''
self.win_instance.print_dict(dict=self.__dict__, printmethod=printmethod)
class WinInstanceDiskDrive(WinInstanceDiskType):
def setup(self):
if not hasattr(self, 'serialnumber'):
self.serialnumber = ''
if not hasattr(self, 'caption'):
self.caption = ''
if hasattr(self, 'model'):
self.caption = self.model
else:
self.model = self.caption
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.update_ebs_info()
self.disk_partitions = []
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
('caption' in wmic_dict or 'model' in wmic_dict) and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, caption (or model), and index')
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def update_md5_info_from_ebs(self):
self.md5 = None
self.md5len = None
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
if not vol.md5:
vol.md5len = 1024
vol.md5 = self.win_instance.get_dev_md5(self.cygwin_scsi_drive, vol.md5len)
self.md5 = vol.md5
self.md5len = vol.md5len
break
def update_ebs_info_from_serial_number(self):
'''
Attempts to parse the serial number field from an EBS volume and find the correlating ebs volume
example format: vol-81C13EA4-dev-sdg
'''
if re.match("^vol-", self.serialnumber):
split = self.serialnumber.split('-')
self.ebs_volume = str(split[0]) + "-" + str(split[1])
self.ebs_cloud_dev = "/" + str(split[2]) + "/" + str(split[3])
else:
self.ebs_volume = ''
self.ebs_cloud_dev = ''
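# example of the serial number parsing above:
#   'vol-81C13EA4-dev-sdg' -> split on '-' gives ['vol','81C13EA4','dev','sdg']
#   ebs_volume    = 'vol-81C13EA4'
#   ebs_cloud_dev = '/dev/sdg'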
def update_ebs_info(self):
self.update_ebs_info_from_serial_number()
if not self.ebs_volume:
if self.index == 0 and self.win_instance.root_device_type == 'ebs':
bdm = self.win_instance.block_device_mapping[self.win_instance.root_device_name]
self.ebs_volume = bdm.volume_id
else:
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
self.ebs_volume = vol.id
break
if not self.ebs_cloud_dev and self.ebs_volume:
volume = self.win_instance.tester.get_volume(volume_id=self.ebs_volume)
if hasattr(volume,'attach_data') and volume.attach_data:
self.ebs_cloud_dev = volume.attach_data.device
self.update_md5_info_from_ebs()
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 20
size = 16
sizegb = 7
ebsvol = 12
serialnumber = 24
caption = 36
part_count = 6
logical_ids = 8
cygdrive = 10
md5 = 32
header = "DISKDRIVE DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"EBS VOL".center(ebsvol) + "|" + \
"CAPTION".center(caption) + "|" + \
"PARTS".center(part_count) + "|" + \
"LOGICAL".center(logical_ids) + "|" + \
"CYGDRIVE".center(cygdrive) + "|" + \
"SERIAL NUMBER".center(serialnumber) + "|" + \
"MD5 CHECK SUM".center(md5) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.ebs_volume).center(ebsvol) + "|" + \
str(self.caption).center(caption) + "|" + \
str(self.partitions).center(part_count) + "|" + \
str(",".join(str(x) for x in self.get_logicaldisk_ids())).center(logical_ids) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|" + \
str(self.serialnumber).center(serialnumber) + "|" + \
str(self.md5).center(md5) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceDiskPartition(WinInstanceDiskType):
def setup(self):
#self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(drive_id=self.deviceid)
self.logicaldisks = []
#Set values in case 'brief' was used when fetching partitions
if not hasattr(self,'deviceid'):
self.deviceid = self.name
if not hasattr(self,'bootable'):
self.bootable = self.bootpartition
if not hasattr(self,'diskindex'):
self.diskindex = self.get_disk_index_from_name()
def check_dict_requires(self, wmic_dict):
if not ('name' in wmic_dict and
'size' in wmic_dict and
'bootpartition' in wmic_dict and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; name, size, bootpartition and index')
def get_disk_index_from_name(self):
diskindex = None
diskindexstring = self.name.split(',')[0]
if re.search('disk', diskindexstring, re.IGNORECASE):
diskindex = int(diskindexstring.split('#')[1])
return diskindex
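# example: a partition name such as 'Disk #2, Partition #0' splits on ','
# to 'Disk #2', matches 'disk', and splits on '#' to yield diskindex 2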
def get_logicaldisk_ids(self):
retlist = []
for disk in self.logicaldisks:
retlist.append(disk.deviceid)
return retlist
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
sizegb = 12
sizemb = 12
bootable = 10
header = "PARTITION DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"SIZE MB".center(sizemb) + "|" + \
"BOOTABLE".center(bootable) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.size_in_mb).center(sizemb) + "|" + \
str(self.bootable).center(bootable) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceLogicalDisk(WinInstanceDiskType):
def setup(self):
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.partition = None
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
'description' in wmic_dict and
'freespace' in wmic_dict and
'filesystem' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, description, freespace and filesystem')
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
freespace = 16
filesystem = 24
description = 30
cygdrive = 10
header = "LOGICAL DEV ID".center(deviceid) + "|" + \
"SIZE".center(size) + "|" + \
"FREE SPACE".center(freespace) + "|" + \
"FILE SYSTEM".center(filesystem) + "|" + \
"DESCRIPTION".center(description) + "|" + \
"CYGDRIVE".center(cygdrive) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.freespace).center(freespace) + "|" + \
str(self.filesystem).center(filesystem) + "|" + \
str(self.description).center(description) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstance(Instance, TaggedResource):
gigabyte = 1073741824
megabyte = 1048576
@classmethod
def make_euinstance_from_instance(cls,
instance,
tester,
debugmethod = None,
keypair=None,
keypath=None,
password=None,
username="Administrator",
auto_connect = True,
verbose=True,
timeout=120,
private_addressing = False,
reservation = None,
cmdstart=None,
try_non_root_exec=True,
winrm_port='5985',
winrm_protocol='http',
rdp_port='3389',
rootfs_device = "sda",
block_device_prefix = "sd",
bdm_root_vol = None,
virtio_blk = True,
cygwin_path = None,
disk_update_interval=10,
retry=2,
brief=False
):
'''
Primary constructor for this class. Note: to avoid an ssh session within this method, provide keys, username/pass later.
Arguments:
instance - mandatory- a Boto instance object used to build this euinstance object
keypair - optional- a boto keypair object used for creating ssh connection to the instance
username - optional- string used to create ssh connection as an alternative to keypair
password - optional- string used to create ssh connection to this instance as an alternative to keypair
auto_connect -optional -boolean, if True will attempt to automatically create an ssh session for this instance
try_non_root_exec -optional -boolean, if True will attempt to use sudo if available else su -c to execute privileged commands
timeout - optional- integer used for ssh connection timeout
debugmethod - optional - method, used for debug output
verbose - optional - boolean to determine if debug is to be printed using debug()
retry - optional - integer, ssh connection attempts for non-authentication failures
'''
newins = WinInstance(instance.connection)
newins.__dict__ = instance.__dict__
newins.tester = tester
newins.winrm_port = winrm_port
newins.rdp_port = rdp_port
newins.bdm_root_vol = None
newins.winrm_protocol = winrm_protocol
newins.debugmethod = debugmethod
if newins.debugmethod is None:
newins.log = eulogger.Eulogger(identifier= str(instance.id))
newins.debugmethod= newins.log.debug
if (keypair is not None):
if isinstance(keypair,types.StringTypes):
keyname = keypair
keypair = tester.get_keypair(keyname)
else:
keyname = keypair.name
newins.keypath = keypath or os.getcwd() + "/" + keyname + ".pem"
newins.keypair = keypair
newins.password = password
newins.username = username
newins.verbose = verbose
newins.attached_vols=[]
newins.timeout = timeout
newins.virtio_blk = virtio_blk
newins.disk_update_interval = disk_update_interval
newins.retry = retry
newins.brief = brief
newins.rootfs_device = rootfs_device
newins.block_device_prefix = block_device_prefix
newins.private_addressing = private_addressing
newins.reservation = reservation or newins.get_reservation()
if newins.reservation:
newins.security_groups = newins.tester.get_instance_security_groups(newins)
else:
newins.security_groups = None
newins.laststate = newins.state
newins.cmdstart = cmdstart
newins.auto_connect = auto_connect
newins.set_last_status()
newins.update_vm_type_info()
newins.cygwin_path = cygwin_path
newins.system_info = None
newins.diskdrives = []
newins.disk_partitions = []
newins.logicaldisks = []
newins.cygwin_dev_map = {}
#newins.set_block_device_prefix()
if newins.root_device_type == 'ebs':
try:
volume = newins.tester.get_volume(volume_id = newins.block_device_mapping.get(newins.root_device_name).volume_id)
newins.bdm_root_vol = EuVolume.make_euvol_from_vol(volume, tester=newins.tester,cmdstart=newins.cmdstart)
except:pass
newins.winrm = None
if newins.auto_connect and newins.state == 'running':
newins.connect_to_instance(timeout=timeout)
return newins
@property
def age(self):
launchtime = self.tester.get_datetime_from_resource_string(self.launch_time)
# return the elapsed time in seconds
return (time.mktime(datetime.utcnow().utctimetuple()) -
time.mktime(launchtime.utctimetuple()))
def update(self, validate=False, dry_run=False,
err_state='terminated', err_code=-1):
ret = None
tb = ""
retries = 2
for x in xrange(0, retries):
try:
#send with validation True, fail later...
ret = super(WinInstance, self).update(validate=True,
dry_run=dry_run)
break
except ValueError:
if validate:
raise
tb = self.tester.get_traceback()
self.debug('Failed to update instance. Attempt:{0}/{1}'
.format(x, retries))
if not ret:
failmsg = 'Failed to update instance. Instance may no longer ' \
'be present on system"{0}"'.format(self.id)
self.debug('{0}\n{1}'.format(tb, failmsg))
self.debug('{0} setting fake state to:"{1}"'.format(self.id,
err_state))
state = InstanceState(name=err_state, code=err_code)
self._state = state
ret = self.state
self.set_last_status()
return ret
def update_vm_type_info(self):
self.vmtype_info = self.tester.get_vm_type_from_zone(self.placement,self.instance_type)
return self.vmtype_info
def set_last_status(self,status=None):
self.laststate = self.state
self.laststatetime = time.time()
self.age_at_state = self.tester.get_instance_time_launched(self)
#Also record age from user's perspective, ie when they issued the run instance request (if this is available)
if self.cmdstart:
self.age_from_run_cmd = "{0:.2f}".format(time.time() - self.cmdstart)
else:
self.age_from_run_cmd = None
def print_dict(self, dict=None, printmethod=None):
'''
formats and prints
'''
printmethod = printmethod or self.debug
buf = "\n"
dict = dict or self.__dict__
longest_key = 0
for key in dict:
if len(key) > longest_key:
longest_key = len(key)
for key in dict:
buf += str(key).ljust(longest_key) + " -----> :" + str(dict[key]) + "\n"
printmethod(buf)
def printself(self, title=True, footer=True, printmethod=None, printme=True):
def state_markup(state):
# Markup instance state...
if state == 'running':
return markup(state, markups=[1, 92])
if state == 'terminated':
return markup(state, markups=[1, 97])
if state == 'shutting-down':
return markup(state, markups=[1, 95])
if state == 'pending':
return markup(state, markups=[1, 93])
if state == 'stopped':
return markup(state, markups=[1, 91])
else:
return markup(state, markups=[1, 91])
def multi_line(lines):
# Utility method for creating multi line table entries...
buf = ""
maxlen = 0
for line in lines:
if len(line) + 2 > maxlen:
maxlen = len(line) + 2
for line in lines:
buf += str(line).ljust(maxlen) + "\n"
buf = buf.rstrip()
return (buf, maxlen)
bdmvol = self.root_device_type
if self.bdm_root_vol:
bdmvol += ":" + self.bdm_root_vol.id
reservation_id = None
if self.reservation:
reservation_id = self.reservation.id
owner_id = self.reservation.owner_id
else:
owner_id = "???"
# Create a multi line field for instance's run info
idlist = [markup("{0} {1}".format('ID:', self.id), markups=[1, 4, 94]),
"{0} {1}".format(markup('TYPE:'), self.instance_type),
"{0} {1}".format(markup('RES:'), reservation_id),
"{0}".format(markup("ACCOUNT ID:")), owner_id]
id_string, idlen = multi_line(idlist)
try:
emi = self.tester.get_emi(self.image_id)
emi_name = str(emi.name[0:18]) + ".."
except:
emi_name = ""
# Create a multi line field for the instance's image info
virt_type = 'PV'
if self.virtualization_type == 'hvm':
virt_type = 'HVM'
emi_string, emilen = multi_line(
[markup("{0} {1}".format('EMI:', self.image_id)),
"{0} {1}".format(markup('OS:'), self.platform or 'linux'),
"{0} {1}".format(markup('VIRT:'), virt_type),
"{0}".format(markup('IMAGE NAME:')),
emi_name])
# Create a multi line field for the instance's state info
if self.age:
age = int(self.age)
state_string, state_len = multi_line(["STATE: " + state_markup(self.laststate),
"{0} {1}".format(markup('AGE:'), age),
"{0} {1}".format(markup("ZONE:"), self.placement),
markup('ROOTDEV:'), bdmvol])
# Create the primary table called pt...
netinfo = 'INSTANCE NETWORK INFO:'
idheader = 'INSTANCE ID'
imageheader = 'INSTANCE IMAGE'
stateheader = 'INSTANCE STATE'
pt = PrettyTable([idheader, imageheader, stateheader, netinfo])
pt.align[netinfo] = 'l'
pt.valign[netinfo] = 'm'
pt.align[idheader] = 'l'
pt.align[imageheader] = 'l'
pt.align[stateheader] = 'l'
pt.max_width[idheader] = idlen
pt.max_width[imageheader] = emilen
pt.max_width[stateheader] = state_len
pt.padding_width = 0
pt.hrules = ALL
# PrettyTable headers do not work with ascii markups, so make a pseudo header
new_header = []
for field in pt._field_names:
new_header.append(markup(field, markups=[1, 4]))
pt.add_row(new_header)
pt.header = False
# Create a subtable 'netpt' to summarize and format the networking portion...
# Set the maxwidth of each column so the tables line up when showing multiple instances
vpc_col = ('VPC', 4)
subnet_col = ('SUBNET', 6)
if self.vpc_id:
vpc_col = ('VPC', 12)
subnet_col = ('SUBNET', 15)
secgrp_col = ('SEC GRPS', 11)
privaddr_col = ('P', 1)
privip_col = ('PRIV IP', 15)
pubip_col = ('PUB IP', 15)
net_cols = [vpc_col, subnet_col, secgrp_col, privaddr_col, privip_col, pubip_col]
# Get the Max width of the main tables network summary column...
# Start with 2 to account for beginning and end column borders
netinfo_width = 2
netinfo_header = []
for col in net_cols:
netinfo_width += col[1] + 1
netinfo_header.append(col[0])
pt.max_width[netinfo] = netinfo_width
netpt = PrettyTable([vpc_col[0], subnet_col[0], secgrp_col[0], privaddr_col[0],
privip_col[0], pubip_col[0]])
netpt.padding_width = 0
netpt.vrules = ALL
for col in net_cols:
netpt.max_width[col[0]] = col[1]
sec_grps = []
for grp in self.groups:
sec_grps.append(str(grp.id))
sec_grps = ",".join(sec_grps)
private_addressing = "N"
if self.private_addressing:
private_addressing = "Y"
netpt.add_row([str(self.vpc_id).center(vpc_col[1]),
str(self.subnet_id).center(subnet_col[1]),
str(sec_grps).center(secgrp_col[1]),
str(private_addressing).center(privaddr_col[1]),
str(self.private_ip_address).center(privip_col[1]),
str(self.ip_address).center(pubip_col[1])])
# To squeeze a potentially long keyname under the network summary table, get the length
# and format this column to allow for wrapping a keyname under the table...
# netbuf = netpt.get_string()
netbuf = "{0}:{1} {2}:{3}\n".format(markup("NODE"),
self.tags.get('euca:node', "???").ljust(16),
markup("KEYPAIR"), self.key_name)
netbuf += "\n".join(netpt.get_string().splitlines()[0:-1])
# Create the row in the main table...
pt.add_row([id_string, emi_string, state_string, netbuf])
if printme:
printmethod = printmethod or self.log.debug
printmethod("\n" + str(pt) + "\n")
return pt
def get_password(self,
private_key_path=None,
key=None,
dir=None,
exten=".pem",
encoded=True,
force_update=False):
'''
:param private_key_path: private key file used to decrypt password
:param key: name of private key
:param dir: Path to private key
:param exten: extension of private key
:param encoded: boolean of whether string returned from server is
Base64 encoded
:return: decrypted password
'''
if self.password is None or force_update:
self.password = self.tester.get_windows_instance_password(
self,
private_key_path=private_key_path,
key=key,
dir=dir,
exten=exten,
encoded=encoded)
return self.password
def reset_ssh_connection(self, timeout=None):
# todo: Remove ssh reference from this method, use something like
# reset_instance_connection, etc..
self.debug('Note ssh not implemented at this time, using winrm for '
'shell access instead...')
return self.reset_winrm_connection(timeout=timeout)
def reset_winrm_connection(self, timeout=None, force=False):
# todo:
timeout = timeout or self.timeout
self.debug('reset_winrm_connection for:'+str(self.id))
self.get_password(force_update=True)
if self.username is None or self.password is None:
#Allow but warn here as this may be a valid negative test
self.debug('Warning username and/or password were None in '
'winrm connection?')
# Create a new winrm interface if this is a new instance or
# an attribute has changed...
try:
#Check the port in order to provide debug if the connection fails
self.test_port_status(port=self.winrm_port, ip=self.ip_address)
except:pass
if force or not (self.winrm and \
self.winrm.hostname == self.ip_address and \
self.winrm.username == self.username and \
self.winrm.password == self.password):
if self.winrm:
self.winrm.close_shell()
self.winrm = winrm_connection.Winrm_Connection(
hostname = self.ip_address,
username = self.username,
password = self.password,
port = self.winrm_port,
protocol = self.winrm_protocol,
debug_method = self.debug,
verbose=True
)
def get_reservation(self):
res = None
try:
res = self.tester.get_reservation_for_instance(self)
except Exception, e:
self.update()
self.debug('Could not get reservation for instance in state:' +
str(self.state) + ", err:" + str(e))
return res
def connect_to_instance(self, wait_for_boot=180, timeout=120):
'''
Attempts to connect to an instance via ssh.
:params wait_for_boot: time to wait, allowing guest to boot before
attempting to poll for ports active status
:params timeout: -optional - time in seconds to wait when polling
port(s) status(s) before failure
'''
self.debug("{0}connect_to_instance starting.\nwait_for_boot:{1} "
"seconds\ntimeout from boot:{2}{3}"
.format(termline, wait_for_boot, timeout, termline))
try:
self.poll_for_port_status_with_boot_delay(waitforboot=wait_for_boot,
timeout=timeout)
except Exception, e:
self.debug('Warning failed to poll port status:' + str(e))
self.debug("Attempting to create connection to instance:" + self.id)
attempts = 0
start = time.time()
elapsed = 0
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
while (elapsed < timeout):
attempts += 1
try:
self.update()
self.reset_winrm_connection()
self.debug('Try some sys...')
self.sys("whoami")
except Exception, se:
tb = self.tester.get_traceback()
self.debug('Caught exception attempting to connect '
'winrm shell:\n'+ str(tb) + str(se))
elapsed = int(time.time()-start)
self.debug('connect_to_instance: Attempts:' + str(attempts) +
', elapsed:'+str(elapsed)+'/'+str(timeout))
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
time.sleep(5)
pass
else:
break
elapsed = int(time.time()-start)
if self.winrm is None:
self.get_connection_debug()
raise RuntimeError(str(self.id) +
":Failed establishing management connection to "
"instance, elapsed:" + str(elapsed) +
"/" + str(timeout))
self.debug('Connect_to_instance updating attached volumes/disk '
'info for vols: ' + str(self.attached_vols))
if self.brief:
self.update_system_info()
else:
self.update_system_and_disk_info()
self.init_attached_volumes()
self.debug("{0}connect_to_instance completed{1}"
.format(termline, termline))
def get_connection_debug(self):
# Add network debug/diag info here...
# First show arp cache from local machine
# todo Consider getting info from relevant euca components:
# - iptables info
# - route info
# - instance xml
try:
# Show local ARP info...
arp_out = "\nLocal ARP cache for instance ip: " \
+ str(self.ip_address) + "\n"
arp_fd = os.popen('arp ' + str(self.ip_address))
for line in arp_fd:
arp_out += line
self.debug(arp_out)
except Exception as AE:
self.log.debug('Failed to get arp info:' + str(AE))
try:
self.tester.get_console_output(self)
except Exception as CE:
self.log.debug('Failed to get console output:' + str(CE))
def update_root_device_diskdrive(self):
if not self.root_device_type == 'ebs':
return
for disk in self.diskdrives:
if disk.index == 0:
if disk.ebs_volume:
for vol in self.attached_vols:
if vol.id == disk.ebs_volume:
if not disk.md5:
disk.update_md5_info_from_ebs()
return
volume = self.tester.get_volume(volume_id=disk.ebs_volume)
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume, self.tester)
volume.guestdev = disk.deviceid
volume.md5len = 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not self.get_volume_from_attached_list_by_id(volume.id):
self.debug("{0} updating with root vol:{1}{2}"
.format(termline,
volume.id,
termline))
self.attached_vols.append(volume)
disk.update_md5_info_from_ebs()
return
def get_volume_from_attached_list_by_id(self, volume_id):
for vol in self.attached_vols:
if vol.id == volume_id:
return vol
def update_system_and_disk_info(self):
try:
self.update_system_info()
except Exception, sie:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating system info:" + str(sie))
try:
self.update_disk_info()
self.update_root_device_diskdrive()
self.print_partition_summary()
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
except Exception, ude:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating disk info:" + str(ude))
def has_sudo(self):
return False
def debug(self,msg,traceback=1,method=None,frame=False):
'''
Used to print debug, defaults to print() but over ridden by self.debugmethod if not None
msg - mandatory -string, message to be printed
'''
if ( self.verbose is True ):
self.debugmethod(msg)
def sys(self, cmd, verbose=True, code=None, include_stderr=False, enable_debug=False, timeout=None):
'''
Issues a command against the ssh connection to this instance
Returns a list of the lines from stdout+stderr as a result of the command
cmd - mandatory - string, the command to be executed
verbose - optional - boolean flag to enable debug
timeout - optional - command timeout in seconds
'''
if (self.winrm is None):
raise Exception("WinInstance winrm connection is None")
return self.winrm.sys(command=cmd, include_stderr=include_stderr, timeout=timeout, verbose=verbose, code=code)
def test_rdp_port_status(self, ip=None, port=3389, timeout=10):
'''
Description: Attempts to test that the host is accepting tcp connections to the RDP port
'''
ip = ip or self.ip_address
return self.test_port_status(ip=ip, port=port, timeout=timeout)
def test_port_status(self, port, ip=None, timeout=5, tcp=True, verbose=True):
ip = ip or self.ip_address
return self.tester.test_port_status(ip, int(port), timeout=timeout, tcp=tcp, verbose=verbose)
def poll_for_port_status_with_boot_delay(self, interval=15, ports=[], socktimeout=5,timeout=180, waitforboot=300):
'''
Make sure some time has passed before we test on the guest side before running guest test...
'''
launch_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if launch_seconds > waitforboot else (waitforboot - launch_seconds)
self.debug("Instance was launched "+str(launch_seconds)+" seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
time.sleep(sleeptime)
return self.poll_for_ports_status(ports,
ip=self.ip_address,
interval=interval,
socktimeout=socktimeout,
timeout=timeout)
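# example: an instance launched 100 seconds ago with waitforboot=300 sleeps
# the remaining 200 seconds before polling the rdp/winrm ports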
def wait_for_time_since_launch(self,waitforboot=420):
'''
When using larger instance store images, this can allow for the delays caused by image size/transfer.
'''
boot_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if boot_seconds > waitforboot else (waitforboot - boot_seconds)
self.debug("Instance was launched "+str(boot_seconds)+"/"+str(waitforboot) + " seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
start = time.time()
elapsed = 0
print "Waiting for Windows to fully boot:",
while elapsed < sleeptime:
print "Waiting for Windows to fully boot:"+str(sleeptime-elapsed),
time.sleep(5)
elapsed=int(time.time()-start)
self.debug("test_wait_for_instance_boot: done waiting, instance up for "+str(waitforboot)+" seconds")
def poll_for_ports_status(self, ports=[], ip=None, interval=10, socktimeout=5, timeout=180):
ip = ip or self.ip_address
ports = ports or [self.rdp_port, self.winrm_port]
start = time.time()
elapsed = 0
attempt = 0
while elapsed < timeout:
attempt +=1
self.debug('test_poll_for_ports_status, ports: ' + ",".join(str(x) for x in ports) + ", attempt:" + str(attempt))
for port in ports:
if elapsed < timeout:
try:
self.debug('Trying ip:port:' + str(self.ip_address) + ':' + str(port) + ", elapsed:" + str(elapsed))
self.test_port_status(ip=ip, port=int(port), timeout=5)
return
except socket.error, se:
self.debug('test_ports_status failed socket error:'+str(se[0]))
#handle specific errors here, for now just for debug...
ecode=se[0]
if ecode == socket.errno.ETIMEDOUT or ecode == "timed out":
self.debug("test_poll_for_ports_status: Connect "+str(ip)+":" +str(port)+ " timed out retrying. Time remaining("+str(timeout-elapsed)+")")
except Exception, e:
tb = self.tester.get_traceback()
self.debug(tb)
self.debug('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+', err:'+str(e) )
elapsed = int(time.time() -start)
if elapsed < timeout:
time.sleep(interval)
raise Exception('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+' seconds')
def init_attached_volumes(self):
self.debug('init_attached_volumes... attached_vols: ' + str(self.attached_vols))
syncdict = self.sync_attached_volumes_with_clouds_view()
if syncdict['errors']:
errmsg = 'Errors syncing guest volumes with cloud at init:' + ",".join(str(e) for e in syncdict['errors'])
errmsg += 'Failed to sync guest volumes with cloud at init:' + ",".join(str(x) for x in syncdict['badvols'])
self.debug(errmsg)
time.sleep(60)
raise Exception(errmsg)
def sync_attached_volumes_with_clouds_view(self):
self.debug(termline +
"Starting sync_attached_volumes_with_clouds_view"
+ termline )
badvols = []
errors = []
ret = {'errors':errors, 'badvols':badvols}
#Get a list of volumes that the cloud believes are currently attached
cloud_volumes = self.tester.get_volumes(attached_instance=self.id)
#Make a copy of a list of volumes this instance thinks are currently attached
locallist = copy.copy(self.attached_vols)
self.debug('Cloud list:' + str(cloud_volumes))
self.debug('Local list:' + str(locallist))
for vol in cloud_volumes:
for local_vol in locallist:
if local_vol.id == vol.id:
locallist.remove(local_vol)
if not isinstance(vol, EuVolume):
vol = EuVolume.make_euvol_from_vol(vol, self.tester)
try:
self.update_volume_guest_info(volume=vol)
except Exception, e:
badvols.append(vol)
errors.append(vol.id + ' Error syncing with cloud:' + str (e) + '. \n')
for local_vol in locallist:
badvols.append(local_vol)
errors.append(local_vol.id + ' Error unattached volume found in guests attach list. \n')
self.debug(termline +
"Finishing sync_attached_volumes_with_clouds_view"
+ termline )
return ret
def update_system_info(self):
'''
Gather basic system info for this windows instance object and store in self.system_info
Example:
# print wins.system_info.OS_NAME
'Microsoft Windows 7 Professional'
'''
currentkey = None
swap = re.compile('([!@#$%^&*. ])')
info = self.sys('systeminfo')
if self.system_info:
system_info = self.system_info
else:
system_info = type('obj', (object,),{})
if info:
for line in info:
if re.match("^\w.+:", line):
linevals = line.split(':')
currentkey = linevals.pop(0)
#clean up the key string...
currentkey = re.sub('[()]', '', currentkey)
currentkey = re.sub(swap, '_', currentkey)
currentkey = currentkey.lower()
value = ":".join(str(x) for x in linevals) or ""
setattr(system_info, currentkey, str(value).strip())
elif currentkey:
#this is an additional value to our previous key
prev_value = getattr(system_info, currentkey)
if not isinstance(prev_value, types.ListType):
updated_value = [prev_value]
updated_value.append(str(line).strip())
setattr(system_info, currentkey, updated_value)
self.system_info = system_info
def get_cygwin_path(self, prefix="c:\\"):
if self.cygwin_path:
return self.cygwin_path
path = None
self.debug('Trying to find cygwin path...')
out = self.sys('dir ' + str(prefix) + ' /B')
for line in out:
if re.search('cygwin', line):
path = str(prefix) + str(line.strip()) + "\\"
self.cygwin_path = path
break
return path
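# example (hypothetical directory listing): if 'dir c:\ /B' lists a
# 'cygwin64' entry, the cached path becomes 'c:\cygwin64\'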
def cygwin_curl(self, url, connect_timeout=30):
cygpath = self.get_cygwin_path()
if cygpath is None:
raise Exception('Could not find cygwin path on guest for curl?')
curl = cygpath + 'bin\\curl.exe --connect-timeout ' + str(connect_timeout) + ' '
return self.sys(curl + str(url), code=0, timeout=connect_timeout)
def get_metadata(self, element_path='', prefix='latest/meta-data/', use_cygwin=True):
"""Return the lines of metadata from the element path provided"""
### If i can reach the metadata service ip use it to get metadata otherwise try the clc directly
try:
if use_cygwin:
return self.cygwin_curl("http://169.254.169.254/"+str(prefix)+str(element_path), connect_timeout=10)
else:
return self.sys("curl --connect-timeout 10 http://169.254.169.254/"+str(prefix)+str(element_path), code=0)
except:
if use_cygwin:
return self.cygwin_curl("http://" + self.tester.get_ec2_ip() + ":8773/"+str(prefix) + str(element_path))
else:
return self.sys("curl http://" + self.tester.get_ec2_ip() + ":8773/"+str(prefix) + str(element_path), code=0)
def print_diskdrive_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.diskdrives:
printmethod('No disk drives to print?')
return
disklist = copy.copy(self.diskdrives)
buf = (disklist.pop()).get_summary()
for disk in disklist:
buf += disk.get_summary(printheader=False)
printmethod(buf)
def print_partition_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.disk_partitions:
printmethod('No disk partitions to print?')
return
partlist = copy.copy(self.disk_partitions)
buf = (partlist.pop()).get_summary()
for part in partlist:
buf += part.get_summary(printheader=False)
printmethod(buf)
def print_logicaldisk_summary(self,printmethod=None):
printmethod = printmethod or self.debug
if not self.logicaldisks:
printmethod('No logical disks to print?')
return
disklist = copy.copy(self.logicaldisks)
buf = (disklist.pop()).get_summary()
for disk in disklist:
buf += disk.get_summary(printheader=False)
printmethod(buf)
def update_disk_info(self , forceupdate=False):
if self.diskdrives:
if not forceupdate and (time.time() - self.diskdrives[0].last_updated) <= self.disk_update_interval:
return
self.debug('Fetching updated disk info...')
self.diskdrives = []
self.disk_partitions = []
self.logicaldisks = []
self.diskdrives = self.get_updated_diskdrive_info()
self.disk_partitions = self.get_updated_partition_info()
self.logicaldisks = self.get_updated_logicaldisk_info()
self.associate_diskdrives_to_partitions()
self.associate_partitions_to_logicaldrives()
def get_updated_diskdrive_info(self):
'''
Populate self.diskdrives with WinInstanceDisk objects containing info parsed from wmic command.
Since wmic doesn't seem to use delimiters this method attempts to derive the length of each column/header
in order to parse out the info per disk.
(Update throttling to self.disk_update_interval is handled by the update_disk_info() caller.)
'''
#cmd = "wmic diskdrive get /format:textvaluelist.xsl"
self.debug('Getting updated diskdrive info...')
cmd = "wmic diskdrive list full"
diskdrives = []
for disk_dict in self.get_parsed_wmic_command_output(cmd):
try:
diskdrives.append(WinInstanceDiskDrive(self,disk_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceDiskDrive from following dict:')
self.print_dict(dict=disk_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceDiskDrive:" + str(e))
self.debug('get_updated_diskdrive_info, Done')
return diskdrives
def get_updated_partition_info(self):
'''
Populate self.disk_partitions with WinInstanceDiskPartition objects containing info parsed from wmic command.
Since wmic doesn't seem to use delimiters this method attempts to derive the length of each column/header
in order to parse out the info per partition.
(Update throttling to self.disk_update_interval is handled by the update_disk_info() caller.)
'''
self.debug('Getting updated partition info...')
cmd = "wmic partition list brief /format:textvaluelist.xsl"
disk_partitions = []
for part_dict in self.get_parsed_wmic_command_output(cmd):
try:
disk_partitions.append(WinInstanceDiskPartition(self,part_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceDiskPartition from following dict:')
self.print_dict(dict=part_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceDiskPartition:" + str(e))
self.debug('get_updated_partition_info, Done')
return disk_partitions
def get_updated_logicaldisk_info(self):
self.debug('Getting updated logicaldisk info...')
cmd ='wmic logicaldisk list /format:textvaluelist.xsl'
logicaldisks = []
for part_dict in self.get_parsed_wmic_command_output(cmd):
try:
logicaldisks.append(WinInstanceLogicalDisk(self,part_dict))
except Exception, e:
tb = self.tester.get_traceback()
self.debug('Error attempting to create WinInstanceLogicalDisk from following dict:')
self.print_dict(dict=part_dict)
raise Exception(str(tb) + "\n Error attempting to create WinInstanceLogicalDisk:" + str(e))
self.debug('get_updated_logicaldisk_info, Done')
return logicaldisks
def associate_diskdrives_to_partitions(self):
for disk in self.diskdrives:
disk.disk_partitions = []
for part in self.disk_partitions:
if part.diskindex == disk.index:
disk.disk_partitions.append(part)
def associate_partitions_to_logicaldrives(self, verbose=False):
for part in self.disk_partitions:
drive_id = None
part.logicaldisks = []
cmd = 'wmic partition where (DeviceID="Disk #' + str(part.diskindex) + \
', Partition #' + str(part.index) + '") assoc /assocclass:Win32_LogicalDiskToPartition'
output = self.sys(cmd, verbose=verbose, code=0)
for line in output:
if re.search('Win32_LogicalDisk.DeviceID',line):
try:
drive_id = str(line.split()[0].split('=')[1]).replace('"','').strip()
except Exception, e:
tb = self.tester.get_traceback()
self.debug(str(tb)+ "\nError getting logical drive info:" + str(e))
if drive_id:
for disk in self.logicaldisks:
if re.match(disk.deviceid, drive_id):
part.logicaldisks.append(disk)
disk.partition = part
break
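# illustrative sketch: an assoc output line containing (hypothetical output)
#   Win32_LogicalDisk.DeviceID="C:" ...
# yields drive_id 'C:' via split()[0].split('=')[1] with the quotes stripped,
# which is then matched against the deviceids in self.logicaldisks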
def get_cygwin_scsi_dev_for_windows_drive(self, windisk=None, drive_id=""):
'''
param windisk: WinInstanceDiskType object. windisk.deviceid is used to look up the associated cygwin device
param drive_id: String representing the deviceid. Can be used instead of passing a WinInstanceDiskType
'''
windisk_classname = ""
update = False
retries = 2
if windisk:
drive_id = windisk.deviceid
windisk_classname = str(windisk.__class__).split('.').pop()
#If this is a disk drive, allow a retry that sets the force update flag; otherwise don't force an update on retry
if isinstance(windisk,WinInstanceDiskDrive):
update = True
if not drive_id:
raise Exception('WinInstanceDiskType or string w/ device id not provided')
self.debug('Attempting to get cygwin dev for windows drive:' + str(drive_id))
self.update_cygwin_windows_device_map()
for retry in xrange(0, retries):
for device in self.cygwin_dev_map:
if re.search("dev", device):
win_dev = str(self.cygwin_dev_map[device].split('\\').pop()).strip().upper()
formatted_drive_id = str(drive_id.split('\\').pop()).strip().upper()
#self.debug('Attempt to match:"' + str(win_dev) + '" with "' + str(formatted_drive_id) + '"')
if formatted_drive_id == win_dev:
#self.debug('Found match')
return device
if update:
self.update_cygwin_windows_device_map(force_update=True)
else:
break
self.debug('WARNING: Could not find cygwin device for type:"' + str(windisk_classname) + '", deviceid:' +
str(drive_id))
return ""
def get_parsed_wmic_command_output(self, wmic_command, verbose=False):
'''
Attempts to parse the output of a wmic command run with "/format:textvaluelist.xsl" (key=value format)
into a list of dicts.
:param wmic_command: string representing the remote wmic command to be run
:returns: list of dict(s) created from the parsed key=value output of the command.
Note: keys will be in lowercase
'''
self.debug('get_parsed_wmic_command_output, command:' + str(wmic_command))
ret_dicts = []
output = self.sys(wmic_command, verbose=verbose, code=0)
newdict = {}
for line in output:
if not re.match(r"^\w",line):
#If there is a blank line(s) then the previous object is complete
if newdict:
ret_dicts.append(newdict)
newdict = {}
else:
splitline = line.split('=')
key = str(splitline.pop(0)).lower()
if len(splitline) > 1:
value = "=".join(str(x) for x in splitline)
else:
if splitline:
value = splitline.pop()
else:
value = ''
newdict[key] = value
#append the final block in case the command output didn't end with a blank line
if newdict:
    ret_dicts.append(newdict)
return ret_dicts
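# Illustrative sketch (assumed sample data): the textvaluelist output is blank-line delimited key=value
# blocks, e.g.
#   Index=0
#   Name=Disk #0, Partition #0
#   <blank line>
#   Index=1
#   Name=Disk #0, Partition #1
# which the parser above would return as:
#   [{'index': '0', 'name': 'Disk #0, Partition #0'},
#    {'index': '1', 'name': 'Disk #0, Partition #1'}]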
def get_logicaldisk_ids(self, forceupdate=False):
'''
:param forceupdate: boolean, to force an update of logical disks detected on the guest. Otherwise updates are
throttled to self.disk_update_interval
:returns list of device ids (e.g. ['A:', 'C:', 'D:'])
'''
ret = []
self.update_disk_info(forceupdate=forceupdate)
for disk in self.logicaldisks:
ret.append(disk.deviceid)
return ret
def get_diskdrive_ids(self, drivelist=None, forceupdate=False):
'''
:param forceupdate: boolean, to force an update of logical disks detected on the guest. Otherwise updates are
throttled to self.disk_update_interval
:returns list of device ids, e.g. ['\\.\PHYSICALDRIVE0', '\\.\PHYSICALDRIVE1', '\\.\PHYSICALDRIVE2']
'''
ret = []
if not drivelist:
self.update_disk_info(forceupdate=forceupdate)
drivelist = self.diskdrives
for disk in drivelist:
ret.append(disk.deviceid)
return ret
def get_diskdrive_by_deviceid(self, deviceid):
for disk in self.diskdrives:
if disk.deviceid == deviceid:
return disk
def found(self, command, regex):
""" Returns a Boolean of whether the result of the command contains the regex"""
result = self.sys(command)
for line in result:
found = re.search(regex,line)
if found:
return True
return False
def assertFilePresent(self,filepath):
'''
Raise exception if file is not found at filepath on remote guest. Dir separators '\' need to be escaped as '\\'
'''
self.sys('dir ' + str(filepath), code=0)
def assertCygwinFilePresent(self, filepath):
self.cygwin_cmd('ls ' + str(filepath), code=0)
def attach_volume(self, volume, dev=None, timeout=180, overwrite=False):
'''
Method used to attach a volume to an instance and track its use by that instance
required - volume - the volume object being attached (converted to a EuVolume if needed)
optional - dev - string to specify the dev path to 'request' when attaching the volume
optional - timeout - integer- time allowed before failing
optional - overwrite - flag to indicate whether to overwrite head data of a non-zero filled volume upon attach for md5
'''
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume)
return self.attach_euvolume(volume, dev=dev, timeout=timeout, overwrite=overwrite)
def attach_euvolume(self, euvolume, dev=None, timeout=180, overwrite=False):
'''
Method used to attach a volume to an instance and track its use by that instance
required - euvolume - the euvolume object being attached
optional - dev - string to specify the dev path to 'request' when attaching the volume
optional - timeout - integer- time allowed before failing
optional - overwrite - flag to indicate whether to overwrite head data of a non-zero filled volume upon attach for md5
'''
if not isinstance(euvolume, EuVolume):
raise Exception("Volume needs to be of type euvolume, try attach_volume() instead?")
self.debug('Disk drive summary before attach attempt:')
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
self.debug("Attempting to attach volume:"+str(euvolume.id)+" to instance:" +str(self.id)+" to dev:"+ str(dev))
#grab a snapshot of our devices before attach for comparison purposes
diskdrive_list_before = self.get_diskdrive_ids()
use_serial = False
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
break
attached_dev = None
start= time.time()
elapsed = 0
if dev is None:
#update our block device prefix
dev = self.get_free_scsi_dev()
if (self.tester.attach_volume(self, euvolume, dev, pause=10,timeout=timeout)):
if euvolume.attach_data.device != dev:
raise Exception('Attached device:' + str(euvolume.attach_data.device) +
", does not equal requested dev:" + str(dev))
#Find device this volume is using on guest...
euvolume.guestdev = None
while (not euvolume.guestdev and elapsed < timeout):
#Since all hypervisors may not support serial number info, check for an incremental diff in the
# list of physical diskdrives on this guest.
self.debug("Checking for volume attachment on guest, elapsed time("+str(elapsed)+")")
diskdrive_list_after = self.get_diskdrive_ids(forceupdate=True)
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
self.debug("dev_list_after:"+" ".join(diskdrive_list_after))
diff =list( set(diskdrive_list_after) - set(diskdrive_list_before) )
if len(diff) > 0:
self.debug('Got Diff in drives:' + str(diff))
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
if euvolume.id == disk.ebs_volume:
attached_dev = disk.deviceid
euvolume.guestdev = attached_dev
self.debug("Volume:"+str(euvolume.id)+" guest device by serialnumber:"+str(euvolume.guestdev))
break
if not use_serial:
attached_dev = str(diff[0])
euvolume.guestdev = attached_dev.strip()
self.debug("Volume:"+str(euvolume.id)+"found guest device by diff:"+str(euvolume.guestdev))
if attached_dev:
euvolume.guestdev = attached_dev
attached_vol = self.get_volume_from_attached_list_by_id(euvolume.id)
#only track this euvolume if it isn't already in the attached list, to avoid duplicates
if not attached_vol:
    self.attached_vols.append(euvolume)
self.debug(euvolume.id+": Requested dev:"+str(euvolume.attach_data.device)+", attached to guest device:"+str(euvolume.guestdev))
break
elapsed = int(time.time() - start)
time.sleep(2)
if not euvolume.guestdev or not attached_dev:
raise Exception('Device not found on guest after '+str(elapsed)+' seconds')
else:
self.debug('Failed to attach volume:'+str(euvolume.id)+' to instance:'+self.id)
raise Exception('Failed to attach volume:'+str(euvolume.id)+' to instance:'+self.id)
if (attached_dev is None):
self.debug("List after\n"+" ".join(diskdrive_list_after))
raise Exception('Volume:'+str(euvolume.id)+' attached, but not found on guest'+str(self.id)+' after '+str(elapsed)+' seconds?')
#Store the md5sum of this diskdrive in the euvolume...
disk = self.get_diskdrive_by_deviceid(attached_dev)
euvolume.md5len = 1024
euvolume.md5 = self.get_dev_md5(devpath=disk.cygwin_scsi_drive, length=euvolume.md5len)
#update the volume and instances information about the attachment...
self.update_volume_guest_info(volume=euvolume,md5=euvolume.md5, md5len=euvolume.md5len, guestdev=euvolume.guestdev)
self.debug('Success attaching volume:'+str(euvolume.id)+' to instance:'+self.id +
', cloud dev:'+str(euvolume.attach_data.device)+', attached dev:'+str(attached_dev) +
", elapsed:" + str(elapsed))
try:
self.rescan_disks(timeout=20)
except Exception, e:
self.debug('Warning. Error while trying to rescan disks after attaching volume. Error: ' + str(e))
euvolume.printself(printmethod=self.debug)
disk.print_self()
return attached_dev
def get_guest_dev_for_volume(self, volume, forceupdate=False):
use_serial = False
self.update_disk_info(forceupdate=forceupdate)
for disk in self.diskdrives:
if re.search('vol-', disk.serialnumber):
use_serial = True
break
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume=volume, tester=self.tester)
def get_disk_drive_by_id(self, deviceid):
self.update_system_info()
for disk in self.diskdrives:
if disk.deviceid == deviceid:
return disk
return None
def get_guestdevs_inuse_by_vols(self):
retlist = []
for vol in self.attached_vols:
retlist.append(vol.guestdev)
return retlist
def get_free_scsi_dev(self, prefix=None,maxdevs=16):
'''
The volume attach command requires a cloud-level device name that is not currently associated with a volume
Note: This is the device name from the cloud's perspective, not necessarily the guest's
This method attempts to find a free device name to use in the command
optional - prefix - string, prepended to the device search string
optional - maxdevs - number used to specify the max device names to iterate over. Some virt envs have a limit of 16 devs.
'''
d='e'
in_use_cloud = ""
in_use_guest = ""
dev = None
if prefix is None:
prefix = self.block_device_prefix
cloudlist=self.tester.get_volumes(attached_instance=self.id)
for x in xrange(0,maxdevs):
inuse=False
#double up the letter identifier to avoid exceeding z
if d == 'z':
prefix= prefix+'e'
dev = "/dev/"+prefix+str(d)
for avol in self.attached_vols:
if avol.attach_data.device == dev:
inuse = True
in_use_guest += str(avol.id)+", "
continue
#Check to see if the cloud has a conflict with this device name...
for vol in cloudlist:
vol.update()
if (vol.attach_data is not None) and (vol.attach_data.device == dev):
inuse = True
in_use_cloud += str(vol.id)+", "
continue
if inuse is False:
self.debug("Instance:"+str(self.id)+" returning available cloud scsi dev:"+str(dev))
return str(dev)
else:
d = chr(ord('e') + x) #increment the letter we append to the device string prefix
dev = None
if dev is None:
raise Exception("Could not find a free scsi dev on instance:"+self.id+", maxdevs:"+str(maxdevs)+"\nCloud_devs:"+str(in_use_cloud)+"\nGuest_devs:"+str(in_use_guest))
def detach_euvolume(self, euvolume, waitfordev=True, timeout=180):
'''
Method used to detach a volume from an instance and stop tracking its use by that instance
required - euvolume - the euvolume object being detached
waitfordev - boolean to indicate whether or not to poll the guest instance for the local device to be removed
optional - timeout - integer seconds to wait before timing out waiting for the volume to detach
'''
start = time.time()
elapsed = 0
found = True
for vol in self.attached_vols:
if vol.id == euvolume.id:
dev = vol.guestdev
if (self.tester.detach_volume(euvolume,timeout=timeout)):
if waitfordev:
self.debug("Cloud has detached" + str(vol.id) + ", Wait for device:"+str(dev)+" to be removed on guest...")
while (elapsed < timeout):
diskdrive_ids = []
try:
disk_drives = self.get_updated_diskdrive_info()
for disk in disk_drives:
if dev == disk.deviceid:
found = True
break
found = False
self.debug('Diskdrive associated with ' + str(vol.id) + ' has been removed from guest.')
#if device is not present remove it
self.attached_vols.remove(vol)
except Exception, de:
self.debug('Warning, error getting diskdrive id during detach:' + str(de))
if not found:
try:
self.rescan_disks(timeout=20)
except Exception, rescan_err:
    self.debug('Warning: Error while trying to rescan disks after detaching volume:' + str(rescan_err))
try:
self.update_disk_info()
except Exception, ue:
self.debug('Warning: Error while trying to update disk info:' + str(ue))
try:
self.print_diskdrive_summary()
except: pass
self.debug('Volume:' + str(vol.id) + ', detached, and no longer found on guest at:' + str(dev))
vol.set_volume_detached_tags()
return True
time.sleep(10)
elapsed = int(time.time()-start)
diskdrive_ids = self.get_diskdrive_ids(drivelist=disk_drives)
self.debug('Current disk drives on guest:' + ",".join(str(x) for x in diskdrive_ids))
self.debug("Waiting for device '"+str(dev)+"' on guest to be removed.Elapsed:"+str(elapsed))
else:
self.attached_vols.remove(vol)
vol.set_volume_detached_tags()
return True
else:
raise Exception("Volume("+str(vol.id)+") failed to detach from device("+str(dev)+") on ("+str(self.id)+")")
raise Exception("Detach Volume("+str(euvolume.id)+") not found on ("+str(self.id)+")")
return False
def check_hostname(self):
if not hasattr(self, 'system_info'):
self.update_system_info()
if hasattr(self, 'system_info') and hasattr(self.system_info, 'host_name'):
if self.id.upper() == self.system_info.host_name.upper():
self.debug('Hostname:' + str(self.system_info.host_name) + ", instance.id:" + str(self.id))
else:
raise Exception('check_hostname failed: hostname:' + str(self.system_info.host_name).upper() +
" != id:" + str(self.id).upper())
else:
raise Exception('check_hostname failed: System_info.hostname not populated')
def get_process_list_brief(self):
'''
Returns a list of dicts representing the processes running on the remote guest. Each process is represented by a
dict containing information about that process.
'''
cmd = "wmic process list brief /format:textvaluelist.xsl"
return self.get_parsed_wmic_command_output(cmd)
def get_process_list_full(self):
'''
Returns a list of dicts representing the processes running on the remote guest. Each process is represented by a
dict containing information about that process.
'''
cmd = "wmic process list full"
return self.get_parsed_wmic_command_output(cmd)
def get_process_by_name(self,process_name):
'''
Attempts to look up a process on the remote guest.
param process_name: string. The name of the process to get info for
returns a dict representing the information returned from the remote guest
'''
cmd = 'wmic process ' + str(process_name) + ' get /format:textvaluelist.xsl'
result = self.get_parsed_wmic_command_output(cmd)
if result:
return result[0]
def get_services_list_brief(self):
'''
Returns a list of dicts representing the services from the remote guest. Each service is represented by a
dict containing information about the service.
'''
cmd = 'wmic service list brief /format:textvaluelist.xsl'
return self.get_parsed_wmic_command_output(cmd)
def get_services_list_full(self):
'''
Returns a list of dicts representing the services from the remote guest. Each service is represented by a
dict containing information about the service.
'''
cmd = 'wmic service list full'
return self.get_parsed_wmic_command_output(cmd)
def get_service_by_name(self,service_name):
'''
Attempts to look up a service on the remote guest.
param service_name: string. The name of the service to get info for
returns a dict representing the information returned from the remote guest
'''
cmd = 'wmic service ' + str(service_name) + ' get /format:textvaluelist.xsl'
result = self.get_parsed_wmic_command_output(cmd)
if result:
return result[0]
def get_memtotal_in_mb(self):
return long(self.system_info.total_physical_memory.split()[0].replace(',',''))
def get_memtotal_in_gb(self):
return long(self.get_memtotal_in_mb()/1024)
def check_ram_against_vmtype(self, pad=32):
total_ram = self.get_memtotal_in_mb()
self.debug('Ram check: vm_ram:' + str(self.vmtype_info.ram)
+ "mb vs memtotal:" + str(total_ram)
+ "mb. Diff:" + str(self.vmtype_info.ram - total_ram)
+ "mb, pad:" + str(pad) + "mb")
if not ((self.vmtype_info.ram - total_ram) <= pad):
raise Exception('Ram check failed. vm_ram:' + str(self.vmtype_info.ram)
+ " vs memtotal:" + str(total_ram) + ". Diff is greater than allowed pad:" + str(pad) + "mb")
else:
self.debug('check_ram_against_vmtype, passed')
def check_ephemeral_against_vmtype(self):
gb = self.gigabyte
size = self.vmtype_info.disk
ephemeral_dev = self.get_ephemeral_dev()
block_size = self.get_blockdev_size_in_bytes(ephemeral_dev)
gbs = block_size / gb
self.debug('Ephemeral check: ephem_dev:'
+ str(ephemeral_dev)
+ ", bytes:"
+ str(block_size)
+ ", gbs:"
+ str(gbs)
+ ", vmtype size:"
+ str(size))
if gbs != size:
raise Exception('Ephemeral check failed. ' + str(ephemeral_dev) + ' Blocksize: '
+ str(gbs) + "gb (" + str(block_size) + "bytes)"
+ ' != vmtype size:' +str(size) + "gb")
else:
self.debug('check_ephemeral_against_vmtype, passed')
return ephemeral_dev
def get_ephemeral_dev(self):
"""
Attempts to find the block device path on this instance
:return: string representing path to ephemeral block device
"""
ephem_name = None
dev_prefixs = ['s','v','xd','xvd']
if not self.root_device_type == 'ebs':
try:
self.assertFilePresent('/dev/' + str(self.rootfs_device))
return self.rootfs_device
except:
ephem_name = 'da'
else:
ephem_name = 'db'
devs = self.get_dev_dir()
for prefix in dev_prefixs:
if str(prefix+ephem_name) in devs:
return str('/dev/'+prefix+ephem_name)
raise Exception('Could not find ephemeral device?')
def cygwin_cmd(self, cmd, timeout=120, verbose=False, code=None):
cmd = self.get_cygwin_path() + '\\bin\\bash.exe --login -c "' + str(cmd) + '"'
return self.sys(cmd,timeout=timeout, verbose=verbose, code=code)
def get_dev_md5(self, devpath, length, timeout=60):
self.assertCygwinFilePresent(devpath)
if length == 0:
md5 = str(self.cygwin_cmd('md5sum ' + devpath, timeout=timeout)[0]).split(' ')[0].strip()
else:
md5 = str(self.cygwin_cmd("head -c " + str(length) + " " + str(devpath) + " | md5sum")[0]).split(' ')[0].strip()
return md5
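# Illustrative sketch (hypothetical device path): for the default 1024 byte checksum this runs the cygwin
# command 'head -c 1024 /dev/sdb | md5sum' and returns only the md5 hash portion of the output.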
def update_cygwin_windows_device_map(self, prefix='/dev/*', force_update=False):
cygwin_dev_map = {}
if not force_update:
if self.cygwin_dev_map:
if time.time() - self.cygwin_dev_map['last_updated'] <= 30:
cygwin_dev_map = self.cygwin_dev_map
if not cygwin_dev_map:
self.debug('Updating cygwin to windows device mapping...')
output = self.cygwin_cmd("for DEV in " + prefix + " ; do printf $DEV=$(cygpath -w $DEV); echo ''; done",
verbose=False, code=0)
for line in output:
if re.match(prefix, line):
split = line.split('=')
key = split.pop(0)
if split:
value = split.pop()
else:
value = ''
cygwin_dev_map[key]=value
cygwin_dev_map['last_updated'] = time.time()
self.cygwin_dev_map = cygwin_dev_map
self.debug('Updated cygwin to windows device mapping')
return cygwin_dev_map
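# Illustrative sketch (assumed sample mapping, not from the original source): the resulting dict maps
# cygwin device paths to windows paths, plus a refresh timestamp, e.g.:
#   {'/dev/sda': '\\.\PHYSICALDRIVE0',
#    '/dev/sdb': '\\.\PHYSICALDRIVE1',
#    'last_updated': 1385769600.0}
# the 'last_updated' entry is what throttles refreshes to at most once per 30 seconds above.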
def rescan_disks(self, timeout=20):
'''
Attempts to rescan disks on the guest. This may help expedite updates/discovery when attaching/detaching
volumes to the guest. This has also been found to hang post device removal so is used with a 20 second
command timeout as the default.
param timeout: integer. Seconds to wait on command before failing
'''
scriptname = 'eutester_diskpart_script'
self.sys('(echo rescan && echo list disk ) > ' + str(scriptname), code=0)
self.sys('diskpart /s ' + str(scriptname), code=0, timeout=timeout)
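# Illustrative sketch: the generated eutester_diskpart_script file contains just the two lines
#   rescan
#   list disk
# which diskpart executes non-interactively via its /s flag.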
def get_diskdrive_for_volume(self, volume):
if not self.is_volume_attached_to_this_instance(volume):
return None
ret_disk = None
for disk in self.diskdrives:
disk.update_ebs_info()
if disk.ebs_volume == volume.id:
ret_disk = disk
if not ret_disk:
ret_disk = self.find_diskdrive_for_volume_by_serial_number(volume, force_check=True)
if not ret_disk:
if hasattr(volume,'md5') and volume.md5:
ret_disk = self.find_diskdrive_for_volume_by_md5(volume, force_check=True)
return ret_disk
def find_diskdrive_for_volume_by_md5(self, volume, md5=None, length=None, force_check=False):
if not force_check and not self.is_volume_attached_to_this_instance(volume):
return None
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume=volume,tester=self.tester)
md5 = md5 or volume.md5
if not md5:
return None
length = length or volume.md5len
for disk in self.diskdrives:
if disk.cygwin_scsi_drive:
disk_md5 = self.get_dev_md5(disk.cygwin_scsi_drive, length=length)
if disk_md5 == md5:
volume.guestdev = disk.deviceid
volume.md5 = disk_md5
volume.md5len = length
disk.ebs_volume = volume.id
return disk
return None
def find_diskdrive_for_volume_by_serial_number(self, volume, serial_number=None, force_check=False):
'''
Attempt to iterate through all the diskdrives we're aware of. If a diskdrive is found with a serial_number
associated with the volume, return that diskdrive obj.
example serial number format: vol-81C13EA4-dev-sdg
:param volume: volume obj to use for deriving the serial_number
:param serial_number: string. Optional. The string representing the serial # to match.
:returns WinInstanceDiskDrive if found, else None
'''
if not force_check and not self.is_volume_attached_to_this_instance(volume):
return None
if not serial_number:
serial_number = volume.id + volume.attach_data.device.replace('/','-')
for disk in self.diskdrives:
if disk.serialnumber == serial_number:
return disk
return None
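# Illustrative sketch (hypothetical ids, format taken from the docstring above): a volume 'vol-81C13EA4'
# attached at '/dev/sdg' yields serial_number 'vol-81C13EA4' + '-dev-sdg' == 'vol-81C13EA4-dev-sdg',
# which is compared against each diskdrive's reported serialnumber.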
def is_volume_attached_to_this_instance(self, volume):
'''
Attempts to look up volume state per cloud to confirm the cloud believes the state of this volume is attached
to this instance. This does not verify the guest/hypervisor also believes the volume is attached.
:param volume: volume obj.
:returns boolean
'''
volume.update()
if hasattr(volume, 'attach_data') and volume.attach_data and (volume.attach_data.instance_id == self.id):
self.debug('Volume:' + str(volume.id) + " is attached to this instance: " + str(self.id) + " per cloud perspective")
return True
else:
self.debug('Volume:' + str(volume.id) + " is NOT attached to this instance: " + str(self.id) + " per cloud perspective")
return False
def update_volume_guest_info(self, volume, md5=None, md5len=None, guestdev=None):
self.debug("{0} update_volume_guest_info: {1} {2}"
.format(termline, volume, termline))
if not self.is_volume_attached_to_this_instance(volume):
raise Exception('Volume not attached to this instance')
disk = None
if not self.get_volume_from_attached_list_by_id(volume.id):
self.attached_vols.append(volume)
volume.guestdev = guestdev or volume.guestdev
if md5:
if not md5len:
raise Exception('Must provide md5len if providing the md5')
volume.md5 = md5
volume.md5len = md5len
else:
disk = self.get_diskdrive_for_volume(volume)
if not disk:
raise Exception('Could not find diskdrive for volume when attempting to update volume guest info:' + str(volume))
volume.md5len = md5len or 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not guestdev:
volume.guestdev = disk.deviceid
disk = disk or self.get_diskdrive_for_volume(volume)
disk.update_ebs_info()
volume.update_volume_attach_info_tags(md5=volume.md5, md5len=volume.md5len, instance_id=self.id, guestdev=volume.guestdev)
return volume
def get_unsynced_volumes(self, check_md5=True):
'''
Description: Returns list of volumes which are:
-in a state the cloud believes the vol is no longer attached
-the attached device has changed, or is not found.
If all euvols are shown as attached to this instance, and the last known local dev is present and/or a local device is found with matching md5 checksum
then the list will return 'None' as all volumes are successfully attached and state is in sync.
By default this method will iterate through all the known euvolumes attached to this euinstance.
Returns a list of euvolumes for which a corresponding guest device could not be found, or which the cloud no longer believes are attached.
:param check_md5: - optional - find devices by md5 comparison. Default is to only perform this check when virtio_blk is in use.
'''
bad_list = []
retdict = self.sync_attached_volumes_with_clouds_view()
bad_list.extend(retdict['badvols'])
return bad_list
def reboot_instance_and_verify(self,
waitconnect=60,
timeout=600,
wait_for_ports=180,
connect=True,
checkvolstatus=False,
pad=5,
uptime_retries=3):
'''
Attempts to reboot an instance and verify its state post reboot.
waitconnect-optional-integer representing seconds to wait before attempting to connect to instance after reboot
wait_for_ports-optional-integer representing seconds to wait for the winrm (5589) and rdp (3389) ports to respond post reboot
timeout-optional-integer, seconds. If a connection has failed, this timer is used to determine a retry
connect- optional - boolean to indicate whether an ssh session should be established once the expected state has been reached
checkvolstatus - optional -boolean to be used to check volume status post start up
'''
msg=""
newuptime = None
attempt = 0
def get_safe_uptime():
uptime = None
try:
uptime = self.get_uptime()
except: pass
return uptime
self.debug('Attempting to reboot instance:'+str(self.id)+', check attached volume state first')
uptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)
elapsed = 0
start = time.time()
if checkvolstatus:
#update the md5sums per volume before reboot
bad_vols=self.get_unsynced_volumes()
if bad_vols != []:
for bv in bad_vols:
self.debug(str(self.id)+'Unsynced volume found:'+str(bv.id))
raise Exception(str(self.id)+"Could not reboot using checkvolstatus flag due to unsync'd volumes")
self.debug('Rebooting now...')
self.reboot()
time.sleep(waitconnect)
try:
self.poll_for_ports_status(ports=[3389,5589], timeout=wait_for_ports)
except:
self.debug('Failed to poll winrm and rdp ports after ' + str(wait_for_ports) + ' seconds, try to connect anyways...')
timeout=timeout - int(time.time()-start)
while (elapsed < timeout):
self.connect_to_instance(timeout=timeout)
#Wait for the system to provide a valid response for uptime, early connections may not
newuptime = self.tester.wait_for_result( get_safe_uptime, None, oper=operator.ne)
elapsed = int(time.time()-start)
#If the new uptime exceeds the original uptime plus elapsed time by more than 'pad', the instance did not reboot
if (newuptime - (uptime+elapsed)) > pad:
err_msg = "Instance uptime does not represent a reboot. Orig:"+str(uptime)+\
", New:"+str(newuptime)+", elapsed:"+str(elapsed)+"/"+str(timeout)
if elapsed > timeout:
raise Exception(err_msg)
else:
self.debug(err_msg)
else:
self.debug("Instance uptime indicates a reboot. Orig:"+str(uptime)+\
", New:"+str(newuptime)+", elapsed:"+str(elapsed))
break
if checkvolstatus:
badvols= self.get_unsynced_volumes()
if badvols != []:
for vol in badvols:
msg = msg+"\nVolume:"+vol.id+" Local Dev:"+vol.guestdev
raise Exception("Missing volumes post reboot:"+str(msg)+"\n")
self.debug(self.id+" reboot_instance_and_verify Success")
def get_uptime(self):
if not hasattr(self, 'system_info'):
self.update_system_info()
if hasattr(self.system_info, 'system_boot_time'):
return self._get_uptime_from_system_boot_time()
elif hasattr(self.system_info, 'system_up_time'):
return self._get_uptime_from_system_up_time()
else:
tb = self.tester.get_traceback()
raise Exception(str(tb) + '\nCould not get system boot or up time from system_info')
def _get_uptime_from_system_boot_time(self):
#11/18/2013, 3:15:39 PM
if not hasattr(self, 'system_info'):
self.update_system_info()
splitdate = self.system_info.system_boot_time.split()
datestring = splitdate[0]
timestring = splitdate[1]
ampm = splitdate[2]
month, day, year = datestring.replace(',',"").split('/')
hours, minutes, seconds = timestring.split(':')
#convert 12-hour clock to 24-hour time, handling the 12 AM/PM edge cases
if ampm == 'PM' and int(hours) != 12:
    hours = int(hours) + 12
elif ampm == 'AM' and int(hours) == 12:
    hours = 0
datetimestring = str(year) + " " + \
str(month) + " " + \
str(day) + " " + \
str(hours) + " " + \
str(minutes) + " " + \
str(seconds)
dt = datetime.strptime(datetimestring, "%Y %m %d %H %M %S")
return int(time.time() - time.mktime(dt.timetuple()))
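# Worked example (hypothetical boot time): a system_boot_time of '11/18/2013, 3:15:39 PM' is rebuilt as
# '2013 11 18 15 15 39', parsed with strptime("%Y %m %d %H %M %S"), and the returned uptime is the whole
# number of seconds between time.time() and that boot time.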
def _get_uptime_from_system_up_time(self):
#0 Days, 0 Hours, 6 Minutes, 39 Seconds
if not hasattr(self, 'system_info'):
self.update_system_info()
uptime_string = self.system_info.system_up_time
days = 0
hours = 0
minutes = 0
seconds = 0
split = uptime_string.split(',')
for part in split:
time_string = ""
if re.search('Days', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
days = int(time_string or 0)
elif re.search('Hours', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
hours = int(time_string or 0)
elif re.search('Minutes', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
minutes = int(time_string or 0)
elif re.search('Seconds', part, re.IGNORECASE):
time_string = str(part.split()[0]).strip()
seconds = int(time_string or 0)
self.debug("Days:" +str(days)+', Hours:'+ str(hours) + ", Minutes:" + str(minutes) + ", Seconds:" + str(seconds))
uptime = (days * 86400) + (hours * 3600) + (minutes * 60) + seconds
return uptime
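# Worked example: a system_up_time of '0 Days, 2 Hours, 6 Minutes, 39 Seconds' yields
# (0 * 86400) + (2 * 3600) + (6 * 60) + 39 = 7599 seconds.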
def stop_instance_and_verify(self, timeout=200, state='stopped',
failstate='terminated', check_vols=True):
'''
Attempts to stop instance and verify the state has gone to
stopped state
:param timeout; -optional-time to wait on instance to go to state 'state' before failing
:param state: -optional-the expected state to signify success, default is stopped
:param failstate: -optional-a state transition that indicates failure, default is terminated
'''
self.debug(self.id+" Attempting to stop instance...")
start = time.time()
elapsed = 0
self.stop()
while (elapsed < timeout):
time.sleep(2)
self.update()
if self.state == state:
break
if self.state == failstate:
raise Exception(str(self.id) + " instance went to state:" +
str(self.state) + " while stopping")
elapsed = int(time.time()- start)
if elapsed % 10 == 0 :
self.debug(str(self.id) + " wait for stop, in state:" +
str(self.state) + ",time remaining:" +
str(elapsed) + "/" + str(timeout) )
if self.state != state:
raise Exception(self.id + " state: " + str(self.state) +
" expected:" + str(state) +
", after elapsed:" + str(elapsed))
if check_vols:
for volume in self.attached_vols:
volume.update()
if volume.status != 'in-use':
raise Exception(str(self.id) + ', Volume ' +
str(volume.id) + ':' + str(volume.status)
+ ' state did not remain in-use '
'during stop')
self.debug(self.id + " stop_instance_and_verify Success")
def start_instance_and_verify(self, timeout=300, state = 'running',
failstates=['terminated'], failfasttime=30,
connect=True, checkvolstatus=True):
'''
Attempts to start instance and verify state, and reconnects ssh session
:param timeout: -optional-time to wait on instance to go to state
'state' before failing
:param state: -optional-the expected state to signify success,
default is running
:param failstates: -optional-a list of states that indicate failure,
default is ['terminated']
:param connect: -optional - boolean to indicate whether an ssh
session should be established once the expected state
has been reached
:param checkvolstatus: -optional -boolean to be used to check volume
status post start up
'''
self.debug(self.id+" Attempting to start instance...")
if checkvolstatus:
    for volume in self.attached_vols:
        volume.update()
        if volume.status != 'in-use':
            raise Exception(str(self.id) + ', Volume ' + str(volume.id) + ':' + str(volume.status)
                            + ' state did not remain in-use during stop')
self.debug("\n"+ str(self.id) + ": Printing Instance 'attached_vol' list:\n")
self.tester.show_volumes(self.attached_vols)
msg=""
start = time.time()
elapsed = 0
self.update()
#Add fail fast states...
if self.state == 'stopped':
failstates.extend(['stopped','stopping'])
self.start()
while (elapsed < timeout):
elapsed = int(time.time()- start)
self.update()
self.debug(str(self.id) + " wait for start, in state:" +
str(self.state) + ",time remaining:" + str(elapsed) +
"/"+str(timeout) )
if self.state == state:
break
if elapsed >= failfasttime:
for failstate in failstates:
if self.state == failstate:
raise Exception(str(self.id) +
" instance went to state:" +
str(self.state) + " while starting")
time.sleep(10)
if self.state != state:
raise Exception(self.id + " not in " + str(state) +
" state after elapsed:" + str(elapsed))
else:
self.debug(self.id + " went to state:" + str(state))
if connect:
self.connect_to_instance(timeout=timeout)
if checkvolstatus:
badvols= self.get_unsynced_volumes(check_md5=True)
if badvols != []:
for vol in badvols:
msg = msg + "\nVolume:" + vol.id + " Local Dev:" +\
vol.guestdev
raise Exception("Missing volumes post reboot:" + str(msg) +
"\n")
self.debug(self.id+" start_instance_and_verify Success")
# system
import os
import numpy as np
import random
import copy
import time
# ROS
import rospy
import std_msgs.msg
import sensor_msgs.msg
import geometry_msgs.msg
import visualization_msgs.msg
import tf2_ros
import rosbag
import actionlib
from actionlib_msgs.msg import GoalStatus
import ros_numpy
# spartan ROS
import spartan_grasp_msgs.msg
import spartan_grasp_msgs.srv
import pdc_ros_msgs.msg
import fusion_server.msg
import fusion_server.srv
# spartan
import spartan.utils.utils as spartanUtils
import spartan.utils.ros_utils as rosUtils
import spartan.utils.director_utils as director_utils
import spartan.utils.control_utils as control_utils
from spartan.manipulation.schunk_driver import SchunkDriver
import fusion_server
from fusion_server.srv import *
import spartan.manipulation.gripper
from spartan.poser.poser_visualizer import PoserVisualizer
from spartan.manipulation.grasp_data import GraspData
from spartan.manipulation.object_manipulation import ObjectManipulation
from spartan.manipulation.category_manipulation_type import CategoryManipulationType
from spartan.utils.director_ros_visualizer import DirectorROSVisualizer
# director
from director import transformUtils
from director import visualization as vis
import director.objectmodel as om
import director.vtkNumpy as vnp
from director.debugVis import DebugData
import director.vtkAll as vtk
import director.segmentation as segmentation
import director.filterUtils as filterUtils
USING_DIRECTOR = True
if USING_DIRECTOR:
from spartan.utils.taskrunner import TaskRunner
MUG_RACK_CONFIG_FILE = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/manipulation/mug_rack.yaml")
# If USE_DEBUG_SPEED is True, joint speed is limited to DEBUG_SPEED
DEBUG_SPEED = 20 # degrees per second
USE_DEBUG_SPEED = False
MANIP_TYPE = CategoryManipulationType.SHOE_ON_RACK
# MANIP_TYPE = CategoryManipulationType.MUG_ON_SHELF_3D
EXPERIMENT_MODE = True
class GraspSupervisorState(object):
STATUS_LIST = ["ABOVE_TABLE", "PRE_GRASP", "GRASP", "IK_FAILED", "NO_GRASP_FOUND", "GRASP_FOUND", "OBJECT_IN_GRIPPER", "GRASP_FAILED", "SAFETY_CHECK_FAILED", "PLANNING_FAILED", "FAILED"]
def __init__(self):
self.setPickFront()
self.clear()
def setPickFront(self):
self.graspingLocation = "front"
self.stowLocation = "left"
def setPickLeft(self):
self.graspingLocation = "left"
self.stowLocation = "front"
@property
def grasp_data(self):
return self._grasp_data
@grasp_data.setter
def grasp_data(self, value):
"""
:param value: GraspData
:return:
"""
self._grasp_data = value
@property
def cache(self):
return self._cache
def clear(self):
"""
Clear any stateful elements of the state
:return:
"""
self._grasp_data = None
self._status = None
self._cache = dict()
self._trajectory_result = None
def clear_cache(self):
"""
Clears only the cache
:return:
"""
self._cache = dict()
def set_status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
@property
def status(self):
return self._status
@status.setter
def status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
def set_status_ik_failed(self):
self.status = "IK_FAILED"
def print_status(self):
"""
Prints the status
:return:
"""
if self._status is None:
print "Current Status: None"
else:
print "Current Status: " + self._status
class GraspSupervisor(object):
def __init__(self, graspingParamsFile=None, cameraSerialNumber="carmine_1", tfBuffer=None):
self.graspingParamsFile = graspingParamsFile
self.reloadParams()
self.cameraSerialNumber = cameraSerialNumber
self.cameraName = 'camera_' + str(cameraSerialNumber)
self.pointCloudTopic = '/' + str(self.cameraName) + '/depth/points'
self.rgbImageTopic = '/' + str(self.cameraName) + '/rgb/image_rect_color'
self.depthImageTopic = '/' + str(self.cameraName) + '/depth_registered/sw_registered/image_rect'
self.camera_info_topic = '/' + str(self.cameraName) + '/rgb/camera_info'
self.graspFrameName = 'base'
self.ggcnn_grasp_frame_camera_axes_id = "ggcnn_grasp"
self.depthOpticalFrameName = self.cameraName + "_depth_optical_frame"
self.rgbOpticalFrameName = self.cameraName + "_rgb_optical_frame"
self.state = GraspSupervisorState()
self.robotService = rosUtils.RobotService.makeKukaRobotService()
self.robotService._use_debug_speed = USE_DEBUG_SPEED
self.robotService._debug_speed = DEBUG_SPEED
self.usingDirector = True
self.tfBuffer = tfBuffer # don't create a new one if it is passed in
self.setupConfig()
self._grasp_point = None # stores the grasp point to be used in grasp3DLocation
self._cache = dict()
self._gripper = spartan.manipulation.gripper.Gripper.make_schunk_gripper()
self._poser_visualizer = PoserVisualizer.make_default()
self.poser_result = None
self._object_manipulation = None
self._category_manip = None # can be assigned later as needed
self._shoe_manipulation_counter = 0
filename = os.path.join(spartanUtils.getSpartanSourceDir(), 'src/catkin_projects/station_config/RLG_iiwa_1/stored_poses.yaml')
self._stored_poses_director = spartanUtils.getDictFromYamlFilename(filename)
if USING_DIRECTOR:
self.taskRunner = TaskRunner()
self.taskRunner.callOnThread(self.setup)
else:
self.setup()
self.debugMode = False
if self.debugMode:
print "\n\n----------WARNING GRASP SUPERVISOR IN DEBUG MODE----------\n"
# if self.debugMode:
# self.pointCloudListMsg = GraspSupervisor.getDefaultPointCloudListMsg()
def reloadParams(self):
self.graspingParams = spartanUtils.getDictFromYamlFilename(self.graspingParamsFile)
def setup(self):
self.setupSubscribers()
self.setupPublishers()
self.setupTF()
self.setupROSActions()
self.gripperDriver = SchunkDriver()
self.setup_visualization()
def _clear_cache(self):
"""
Clears our local cache of variables
:return:
"""
self._cache = dict()
def setupDirector(self):
self.taskRunner.callOnThread(self.setup)
def setupConfig(self):
self.config = dict()
self.config['base_frame_id'] = "base"
self.config['end_effector_frame_id'] = "iiwa_link_ee"
self.config['pick_up_distance'] = 0.25 # distance to move above the table after grabbing the object
self.config["sleep_time_for_sensor_collect"] = 0.1
self.config['scan'] = dict()
self.config['scan']['pose_list'] = ['scan_left_close', 'scan_above_table', 'scan_right']
self.config['scan']['joint_speed'] = 45
self.config['grasp_speed'] = 20
normal_speed = 30
self.config['speed'] = dict()
self.config['speed']['stow'] = normal_speed
self.config['speed']['pre_grasp'] = normal_speed
self.config['speed']['grasp'] = 10
self.config['home_pose_name'] = 'above_table_pre_grasp'
self.config['grasp_nominal_direction'] = np.array([1, 0, 0]) # x forwards
self.config['grasp_to_ee'] = dict()
self.config["object_interaction"] = dict()
self.config["object_interaction"]["speed"] = 10
self.config["object_interaction"]["rotate_speed"] = 30
self.config["object_interaction"]["pickup_distance"] = 0.15
# self.config["object_interaction"]["drop_distance_above_grasp"] = 0.035 # good for shoes
self.config["object_interaction"]["drop_distance_above_grasp"] = 0.002 # good for mugs
self.config["object_interaction"]["drop_location"] = [0.65, 0, 0.5] # z coordinate is overwritten later
self.graspToIiwaLinkEE = spartanUtils.transformFromPose(
self.graspingParams['gripper_palm_to_ee'])
self.iiwaLinkEEToGraspFrame = self.graspToIiwaLinkEE.GetLinearInverse()
self.gripper_fingertip_to_iiwa_link_ee = spartanUtils.transformFromPose(
self.graspingParams['gripper_fingertip_to_ee'])
self.T_gripper_fingertip__iiwa_link_ee = self.gripper_fingertip_to_iiwa_link_ee.GetLinearInverse()
pos = [-0.15, 0, 0]
quat = [1, 0, 0, 0]
self.preGraspToGraspTransform = transformUtils.transformFromPose(pos, quat)
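# Illustrative note (a sketch based on the values above): preGraspToGraspTransform uses pos [-0.15, 0, 0]
# with the identity quaternion [1, 0, 0, 0], i.e. the pre-grasp pose appears to sit 15 cm behind the grasp
# frame along its x-axis, matching grasp_nominal_direction so the gripper approaches along +x into the grasp.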
def setupSubscribers(self):
self.pointCloudSubscriber = rosUtils.SimpleSubscriber(self.pointCloudTopic, sensor_msgs.msg.PointCloud2)
self.rgbImageSubscriber = rosUtils.SimpleSubscriber(self.rgbImageTopic, sensor_msgs.msg.Image)
self.depthImageSubscriber = rosUtils.SimpleSubscriber(self.depthImageTopic, sensor_msgs.msg.Image)
self.camera_info_subscriber = rosUtils.SimpleSubscriber(self.camera_info_topic, sensor_msgs.msg.CameraInfo)
self.pointCloudSubscriber.start()
self.rgbImageSubscriber.start()
self.depthImageSubscriber.start()
self.camera_info_subscriber.start()
self.clicked_point_subscriber = rosUtils.SimpleSubscriber("/clicked_point", geometry_msgs.msg.PointStamped,
self.on_clicked_point)
self.clicked_point_subscriber.start()
self.ggcnn_subscriber = rosUtils.SimpleSubscriber('ggcnn/out/command', std_msgs.msg.Float32MultiArray)
def setupPublishers(self):
"""
Sets up some ROS publishers
"""
self.rviz_marker_publisher = rospy.Publisher("/spartan_grasp/visualization_marker",
visualization_msgs.msg.Marker, queue_size=1)
self.rviz_marker_array_publisher = rospy.Publisher("/grasp_supervisor/visualization_marker_array",
visualization_msgs.msg.MarkerArray, queue_size=1)
self.grasp_pointcloud_publisher = rospy.Publisher("/grasp_supervisor/points", sensor_msgs.msg.PointCloud2,
queue_size=1)
def setup_visualization(self):
self._vis_container = om.getOrCreateContainer("grasp supervisor")
def on_clicked_point(self, clicked_point_msg):
"""
Visualizes the clicked point in rviz
"""
print "received a /clicked_point message . . . visualizing"
pos = clicked_point_msg.point
x, y, z = pos.x, pos.y, pos.z
marker = visualization_msgs.msg.Marker()
marker.header.frame_id = "base"
marker.header.stamp = rospy.Time.now()
marker.ns = "clicked_point"
marker.id = 0
marker.type = visualization_msgs.msg.Marker.SPHERE
marker.action = visualization_msgs.msg.Marker.ADD
marker.pose.position.x = x
marker.pose.position.y = y
marker.pose.position.z = z
marker.pose.orientation.x = 0.0
marker.pose.orientation.y = 0.0
marker.pose.orientation.z = 0.0
marker.pose.orientation.w = 1.0
marker.scale.x = 0.03
marker.scale.y = 0.03
marker.scale.z = 0.03
marker.color.a = 1.0
marker.color.r = 1.0
marker.color.g = 0.0
marker.color.b = 0.0
# hack to get around director funny business
for i in xrange(0, 5):
self.rviz_marker_publisher.publish(marker)
rospy.sleep(0.02)
def get_clicked_point(self):
"""
Returns the stored clicked point. If there is none it raises an error
rtype: geometry_msgs.msg.Point
"""
lastMsg = self.clicked_point_subscriber.lastMsg
if lastMsg is None:
raise ValueError("No /clicked_point messages found.")
return lastMsg.point
def setupROSActions(self):
actionName = '/spartan_grasp/GenerateGraspsFromPointCloudList'
self.generate_grasps_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.GenerateGraspsFromPointCloudListAction)
actionName = '/spartan_grasp/Grasp3DLocation'
self.grasp_3D_location_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.Grasp3DLocationAction)
findBestBatchActionName = '/FindBestMatch'
self.find_best_match_client = actionlib.SimpleActionClient(findBestBatchActionName,
pdc_ros_msgs.msg.FindBestMatchAction)
poser_action_name = '/Poser'
self.poser_client = actionlib.SimpleActionClient(poser_action_name,
pdc_ros_msgs.msg.DeformableRegistrationAction)
category_manipulation_name = "/CategoryManipulation"
self.category_manip_client = actionlib.SimpleActionClient(category_manipulation_name, pdc_ros_msgs.msg.CategoryManipulationAction)
action_name = "/KeypointDetection"
self.keypoint_detection_client = actionlib.SimpleActionClient(action_name, pdc_ros_msgs.msg.KeypointDetectionAction)
action_name = "/PoseEstimation"
self.pose_estimation_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.EstimatePoseAction)
action_name = "/SaveRGBD"
self.save_RGBD_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.KeypointDetectionAction)
def setupTF(self):
if self.tfBuffer is None:
self.tfBuffer = tf2_ros.Buffer()
self.tfListener = tf2_ros.TransformListener(self.tfBuffer)
self.tfBroadcaster = tf2_ros.TransformBroadcaster()
def getDepthOpticalFrameToWorldTransform(self):
depth_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.depthOpticalFrameName,
rospy.Time(0))
return depth_optical_frame_to_world
def get_transform(self, from_name, to_name, ros_time=None):
if ros_time is None:
ros_time = rospy.Time(0)
transform_stamped_msg = self.tfBuffer.lookup_transform(to_name, from_name, ros_time)
# convert to vtkTransform
pos, quat = rosUtils.poseFromROSTransformMsg(transform_stamped_msg.transform)
return pos, quat
def getRgbOpticalFrameToWorldTransform(self, time=None):
"""
:param time:
:type time:
:return: geometry_msgs/TransformStamped
:rtype:
"""
if time is None:
time = rospy.Time(0)
rgb_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.rgbOpticalFrameName,
time)
return rgb_optical_frame_to_world
def capturePointCloudAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
"""
Captures the current PointCloud2 from the sensor. Also records the pose of camera frame.
"""
# sleep so transforms can update
msg = spartan_grasp_msgs.msg.PointCloudWithTransform()
msg.header.stamp = rospy.Time.now()
msg.camera_origin.x = cameraOrigin[0]
msg.camera_origin.y = cameraOrigin[1]
msg.camera_origin.z = cameraOrigin[2]
msg.point_cloud_to_base_transform = self.getDepthOpticalFrameToWorldTransform()
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
self.testData = msg # for debugging
return msg
def captureRgbdAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
# sleep so transforms can update
msg = pdc_ros_msgs.msg.RGBDWithPose()
msg.header.stamp = rospy.Time.now()
msg.camera_pose = self.getRgbOpticalFrameToWorldTransform()
msg.rgb_image = self.rgbImageSubscriber.waitForNextMessage()
msg.depth_image = self.depthImageSubscriber.waitForNextMessage()
# maybe be careful about rostime here
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
msg.point_cloud_pose = self.getDepthOpticalFrameToWorldTransform()
return msg
def moveHome(self, speed=None):
rospy.loginfo("moving home")
if speed is None:
speed = self.graspingParams['speed']['nominal']
homePose = self.graspingParams[self.state.graspingLocation]['poses']['scan_above_table']
self.robotService.moveToJointPosition(homePose,
maxJointDegreesPerSecond=speed)
def getStowPose(self):
stow_location = self.state.stowLocation
params = self.graspingParams[stow_location]
return params['poses']['stow']
# scans to several positions
def collectSensorData(self, saveToBagFile=False, **kwargs):
"""
Collects PointCloud Messages, also RGB and Depth images.
Writes the result to two class variables
- self.pointCloudListMsg
- self.listOfRgbdWithPose
also returns these two values
"""
self.moveHome()
rospy.loginfo("collecting sensor data")
graspLocationData = self.graspingParams[self.state.graspingLocation]
pointCloudListMsg = spartan_grasp_msgs.msg.PointCloudList()
pointCloudListMsg.header.stamp = rospy.Time.now()
data = dict()
pose_list = graspLocationData['scan_pose_list']
listOfRgbdWithPoseMsg = []
for poseName in pose_list:
rospy.loginfo("moving to pose = " + poseName)
joint_positions = graspLocationData['poses'][poseName]
self.robotService.moveToJointPosition(joint_positions,
maxJointDegreesPerSecond=self.config['scan']['joint_speed'])
rospy.sleep(self.config["sleep_time_for_sensor_collect"])
pointCloudWithTransformMsg = self.capturePointCloudAndCameraTransform()
pointCloudListMsg.point_cloud_list.append(pointCloudWithTransformMsg)
data[poseName] = pointCloudWithTransformMsg
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg.append(rgbdWithPoseMsg)
self.sensorData = data
self.pointCloudListMsg = pointCloudListMsg
self.listOfRgbdWithPoseMsg = listOfRgbdWithPoseMsg
if saveToBagFile:
self.saveSensorDataToBagFile(pointCloudListMsg=pointCloudListMsg, **kwargs)
return pointCloudListMsg, listOfRgbdWithPoseMsg
def findBestBatch(self):
"""
This function will:
- collect a small handful of RGBDWithPose msgs
- call the FindBestMatch service (a service of pdc-ros)
- return what was found from FindBestMatch
"""
self.moveHome()
_, listOfRgbdWithPoseMsg = self.collectSensorData()
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for find best match server")
self.find_best_match_client.wait_for_server()
goal = pdc_ros_msgs.msg.FindBestMatchGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting best match from server")
self.find_best_match_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for find best match result")
self.find_best_match_client.wait_for_result()
result = self.find_best_match_client.get_result()
rospy.loginfo("received best match result")
self.best_match_result = result
if result.match_found:
print "match found"
print "location:", result.best_match_location
else:
print "NO MATCH FOUND"
return result
def run_poser(self):
"""
This function will:
- collect a small handful of RGBDWithPose msgs
- call the Poser DeformableRegistration action (a service of pdc-ros)
- return what was found from Poser
"""
# self.moveHome()
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg = [rgbdWithPoseMsg]
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for poser server")
self.poser_client.wait_for_server()
rospy.loginfo("connected to poser server")
goal = pdc_ros_msgs.msg.DeformableRegistrationGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting registration from poser")
self.poser_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for poser result")
self.poser_client.wait_for_result()
result = self.poser_client.get_result()
state = self.poser_client.get_state()
rospy.loginfo("received poser result")
print("result:\n", result)
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("Poser failed")
self.poser_result = result
self._cache['poser_result'] = result
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.taskRunner.callOnMain(self.visualize_poser_result)
def run_keypoint_detection(self, wait_for_result=True, move_to_stored_pose=True, clear_state=True):
"""
Runs keypoint detection using ManKey in pdc-ros. Note that this clears the cache
:return:
:rtype:
"""
if clear_state:
self._clear_cache()
self.state.clear()
if move_to_stored_pose:
    CMT = CategoryManipulationType
    if MANIP_TYPE in [CMT.SHOE_ON_RACK, CMT.SHOE_ON_TABLE]:
        q = self._stored_poses_director['General']['center_back']
    else:  # basically all mugs
        q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=self.graspingParams['speed']['fast'])
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'] = []
self.state.cache['rgbd_with_pose_list'].append(rgbdWithPoseMsg)
# request via a ROS Action
rospy.loginfo("waiting for KeypointDetection server")
self.keypoint_detection_client.wait_for_server()
rospy.loginfo("connected to KeypointDetection server")
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
if EXPERIMENT_MODE:
goal.output_dir = "mankey_experiments/%s" %(spartanUtils.get_current_YYYY_MM_DD_hh_mm_ss())
rospy.loginfo("requesting action from KeypointDetection server")
self.keypoint_detection_client.send_goal(goal)
self.state.set_status("ABOVE_TABLE")
if wait_for_result:
self.wait_for_keypoint_detection_result()
def wait_for_keypoint_detection_result(self):
"""
Wait for keypoint detection result, save it to cache
"""
rospy.loginfo("waiting for KeypointDetection result")
self.keypoint_detection_client.wait_for_result()
result = self.keypoint_detection_client.get_result()
state = self.keypoint_detection_client.get_state()
rospy.loginfo("received KeypointDetection result")
print "result:\n", result
self.keypoint_detection_result = result
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("KeypointDetection failed")
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.state._cache["keypoint_detection_result"] = result_dict
return result_dict
def check_keypoint_detection_succeeded(self):
"""
Checks whether keypoint detection succeeded or not
:return:
:rtype:
"""
# you should have run keypoint detection before this
keypoint_detection_result = self.state.cache['keypoint_detection_result']
if keypoint_detection_result["state"] == GoalStatus.SUCCEEDED:
return True
else:
print("keypoint detection failed, ABORTING")
return False
def check_category_goal_estimation_succeeded(self):
"""
Returns a bool as to whether category goal estimation succeeded or not
:return:
:rtype:
"""
state = self.state.cache['category_manipulation_goal']['state']
if state == GoalStatus.SUCCEEDED:
return True
else:
print("category goal estimation failed, ABORTING")
return False
def estimate_mug_rack_pose(self):
"""
:return:
:rtype:
"""
# fusion_params_file = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/fusion/fusion_params.yaml")
#
#
# fusion_params = spartanUtils.getDictFromYamlFilename(fusion_params_file)
# bbox_min = np.array(fusion_params['left']['bbox_min'])
# bbox_min[2] += 0.05 # be conservative on where bottom of table is
# bbox_max = np.array(fusion_params['left']['bbox_max'])
bbox_min = np.array([0.07001, 0.49, 0.01026])
bbox_max = np.array([0.47195, 0.85201, 0.75])
rgbd_with_pose_list = []
# move to pose 1, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# move to pose 2, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack_2"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# convert to VTK poly data and crop
d = DebugData()
for msg in rgbd_with_pose_list:
pointcloud_numpy = DirectorROSVisualizer.numpy_from_pointcloud2_msg(msg.point_cloud)
pointcloud_vtk = vnp.getVtkPolyDataFromNumpyPoints(pointcloud_numpy)
T_world_pointcloud = ros_numpy.numpify(msg.point_cloud_pose.transform)
T_world_pointcloud_vtk = transformUtils.getTransformFromNumpy(T_world_pointcloud)
pointcloud_vtk = filterUtils.transformPolyData(pointcloud_vtk, T_world_pointcloud_vtk)
d.addPolyData(pointcloud_vtk)
pointcloud = d.getPolyData()
print "pointcloud.GetNumberOfPoints()", pointcloud.GetNumberOfPoints()
# crop
transform = vtk.vtkTransform()
bounds = np.zeros([2,3])
bounds[0,:] = bbox_min
bounds[1,:] = bbox_max
print "bounds", bounds
cropped_pointcloud = segmentation.cropToBounds(pointcloud, transform, bounds)
print "cropped_pointcloud.GetNumberOfPoints()", cropped_pointcloud.GetNumberOfPoints()
# visualize it
def vis_function():
print "visualizing pointcloud"
vis.showPolyData(pointcloud, "pointcloud")
vis.showPolyData(cropped_pointcloud, "Mug rack pointcloud")
self.mug_rack_pointcloud = cropped_pointcloud
# not working for some reason
print "visualizing"
self.taskRunner.callOnMain(vis_function)
return
rgbd_with_pose = pdc_ros_msgs.msg.RGBDWithPose()
# N x 3
cropped_pointcloud_numpy = vnp.getNumpyFromVtk(cropped_pointcloud)
print "cropped_pointcloud_numpy.shape", cropped_pointcloud_numpy.shape
# save numpy to file
save_file = "/home/manuelli/sandbox/spartan/pointcloud.npy"
np.save(save_file, cropped_pointcloud_numpy)
return
# it's already in world frame
rgbd_with_pose.point_cloud = DirectorROSVisualizer.pointcloud2_msg_from_numpy(cropped_pointcloud_numpy)
# convert it back to ROS msg
goal = pdc_ros_msgs.msg.EstimatePoseGoal()
goal.rgbd_with_pose_list.append(rgbd_with_pose)
T_world_rack_vtk = self._category_manip.mug_rack_vis_obj.getChildFrame().transform
T_world_rack = transformUtils.getNumpyFromTransform(T_world_rack_vtk)
goal.T_init = ros_numpy.msgify(geometry_msgs.Pose, T_world_rack)
# send out service call
self.pose_estimation_client.wait_for_server()
self.pose_estimation_client.send_goal(goal)
# wait for result
self.pose_estimation_client.wait_for_result()
result = self.pose_estimation_client.get_result()
T_world_rack_estimated = ros_numpy.numpify(result.T_world_model)
T_world_rack_estimated_vtk = transformUtils.getTransformFromNumpy(T_world_rack_estimated)
self._category_manip.mug_rack_vis_obj.getChildFrame().copyFrame(T_world_rack_estimated_vtk)
def run_category_manipulation_goal_estimation(self, wait_for_result=True, capture_rgbd=True):
"""
Calls the CategoryManipulation service of pdc-ros
which is provided by category_manip_server.py.
Uses the keypoint detection result from either
`run_poser` or `run_keypoint_detection`
:return: bool
:rtype:
"""
if not self.check_keypoint_detection_succeeded():
return False
keypoint_detection_result = self.state.cache['keypoint_detection_result']
# don't specify poser output dir for now
goal = pdc_ros_msgs.msg.CategoryManipulationGoal()
goal.output_dir = keypoint_detection_result['output_dir']
goal.keypoint_detection_type = keypoint_detection_result['type']
if capture_rgbd:
self.moveHome()
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
if 'rgbd_with_pose_list' in self.state.cache:
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
if MANIP_TYPE == CategoryManipulationType.SHOE_ON_RACK:
print("applying T_adjust")
print("self._shoe_manipulation_counter", self._shoe_manipulation_counter)
goal.apply_T_adjust = True
pos = np.array([self.graspingParams["shoe_offset"], 0, 0]) * self._shoe_manipulation_counter
quat = [1,0,0,0]
T_adjust_vtk = transformUtils.transformFromPose(pos, quat)
T_adjust = transformUtils.getNumpyFromTransform(T_adjust_vtk)
goal.T_adjust = ros_numpy.msgify(geometry_msgs.msg.Pose, T_adjust)
else:
goal.apply_T_adjust = False
rospy.loginfo("waiting for CategoryManip server")
self.category_manip_client.wait_for_server()
rospy.loginfo("connected to CategoryManip server")
self.category_manip_client.send_goal(goal)
if wait_for_result:
self.wait_for_category_manipulation_goal_result()
return True
def wait_for_category_manipulation_goal_result(self):
"""
Waits for category manipulation goal result
"""
print("waiting for category manipulation result")
self.category_manip_client.wait_for_result()
result = self.category_manip_client.get_result()
state = self.category_manip_client.get_state()
T_goal_obs = ros_numpy.numpify(result.T_goal_obs)
print "T_goal_obs:\n", T_goal_obs
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
print transformUtils.poseFromTransform(T_goal_obs_vtk)
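# stash everything downstream manipulation code needs in the state cache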
self.state.cache['category_manipulation_goal'] = dict()
self.state.cache['category_manipulation_goal']['result'] = result
self.state.cache['category_manipulation_goal']["T_goal_obs"] = T_goal_obs_vtk
self.state.cache['category_manipulation_goal']['state'] = state
self.state.cache['category_manipulation_goal']["type"] = CategoryManipulationType.from_string(result.category_manipulation_type)
def run_mug_shelf_3D_pipeline(self):
"""
Runs entire pipeline for mug shelf 3D
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
super_fast_speed = self.graspingParams['speed']['fast']  # note: despite the name, this reuses the 'fast' speed
# q = self._stored_poses_director["General"]["home"]
# q = self._stored_poses_director["mug"]["image_capture_for_mug_shelf"]
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
# move back to home to capture another RGBD image
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_shelf_manipulation()
if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_mug_shelf()
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_mug_on_rack_pipeline(self, side_view=False):
"""
Runs the entire pipeline for placing a mug on the rack
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
q = self._stored_poses_director["General"]["home"]
if side_view:
print "\nusing side view\n"
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
# move to center back to capture another RGBD image
q = self._stored_poses_director["General"]["center_back"]
if side_view:
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_on_rack_manipulation()
if not code:
self.state.set_status("FAILED")
return False
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_shoe_on_rack_pipeline(self):
"""
Runs the entire pipeline for placing a shoe on the rack
:return:
:rtype:
"""
if EXPERIMENT_MODE:
self._shoe_manipulation_counter = 0 # for testing
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
# q = self._stored_poses_director["General"]["center_back"]
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_shoe_rack_manipulation()
if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_shoe_rack()
if EXPERIMENT_MODE:
print "\n\n", self.state.cache['keypoint_detection_result']['output_dir']
def run_manipulate_object(self, debug=False):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
if debug:
self._object_manipulation = ObjectManipulation()
self._object_manipulation.assign_defaults()
self._object_manipulation.compute_transforms()
return
self.moveHome()
grasp_found, grasp_data = self.request_spartan_grasp(clear_state=False)
if not grasp_found:
print "no grasp found, returning\n"
return False
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True)
print "object_in_gripper:", object_in_gripper
T_goal_obs = self.state.cache['category_manipulation_T_goal_obs']
T_W_G = self.state.cache['gripper_frame_at_grasp']
self._object_manipulation = ObjectManipulation(T_goal_object=T_goal_obs, T_W_G=T_W_G)
self._object_manipulation.grasp_data = self.state.grasp_data
self._object_manipulation.compute_transforms()
self.taskRunner.callOnMain(self._object_manipulation.visualize)
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# place the object
grasp_data_place = self._object_manipulation.get_place_grasp_data()
self.execute_place(grasp_data_place)
# open the gripper and back away
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
self.gripperDriver.send_open_gripper_set_distance_from_current()
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# move home
self.moveHome()
def run_shoe_rack_manipulation(self, debug=False, push_in_distance=0.00):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
print("\n\n--- Running Shoe Manipulation-------\n\n")
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
# check that we really are doing shoe-on-rack
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.SHOE_ON_RACK
speed = self.graspingParams['speed']['fast']
self.moveHome(speed=speed)
result = self.state.cache['category_manipulation_goal']['result']
T_W_fingertip = ros_numpy.numpify(result.T_world_gripper_fingertip)
T_W_fingertip_vtk = transformUtils.getTransformFromNumpy(T_W_fingertip)
grasp_data = GraspData.from_gripper_fingertip_frame(T_W_fingertip)
grasp_data.gripper.params["hand_inner_diameter"] = result.gripper_width
grasp_data.gripper.params["hand_inner_diameter"] = 0.07
self.state.grasp_data = grasp_data
# rotate the grasp to align with nominal
params = self.getParamsForCurrentLocation()
grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
def vis_function():
vis.updateFrame(T_W_fingertip_vtk, "gripper fingertip frame", scale=0.15, parent=self._vis_container)
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", scale=0.15, parent=self._vis_container)
self.visualize_grasp(grasp_data)
self.taskRunner.callOnMain(vis_function)
# execute the grasp
force_threshold_magnitude = 30
object_in_gripper = self.execute_grasp(grasp_data, close_gripper=True, use_cartesian_plan=True, force_threshold_magnitude=force_threshold_magnitude, push_in_distance=0.04, ee_speed_m_s=0.1)
if not object_in_gripper:
print("grasp failed, returning")
return False
print "object_in_gripper:", object_in_gripper
T_goal_obs = self.state.cache['category_manipulation_goal']["T_goal_obs"]
T_W_G = self.state.cache['gripper_frame_at_grasp']
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# move home
self.moveHome()
# move to approach pose
speed = self.graspingParams['speed']['fast']
q_approach = np.array(self._stored_poses_director["left_table"]["shoe_approach"])
self.robotService.moveToJointPosition(q_approach, maxJointDegreesPerSecond=speed)
# compute some poses
T_goal_obs = ros_numpy.numpify(result.T_goal_obs) # 4 x 4 numpy matrix
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
object_manip = ObjectManipulation(T_goal_object=T_goal_obs_vtk, T_W_G=T_W_G)
object_manip.compute_transforms()
T_W_Gn_vtk = object_manip.T_W_Gn # gripper to world for place pose
T_pre_goal_obs = ros_numpy.numpify(result.T_pre_goal_obs)
T_pre_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_pre_goal_obs)
object_manip_approach = ObjectManipulation(T_goal_object=T_pre_goal_obs_vtk, T_W_G=T_W_G)
object_manip_approach.compute_transforms()
T_W_Gn_approach_vtk = object_manip_approach.T_W_Gn
# note: this pose round-trip is a no-op; the push_in_distance offset is applied to the place pose below
pos, quat = transformUtils.poseFromTransform(T_W_Gn_approach_vtk)
T_W_Gn_approach_vtk = transformUtils.transformFromPose(pos, quat)
# now convert these to ee poses for running IK
pos, quat = transformUtils.poseFromTransform(T_W_Gn_vtk)
pos[2] -= push_in_distance
T_W_Gn_vtk = transformUtils.transformFromPose(pos, quat)
T_W_ee_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_vtk)
T_W_ee = transformUtils.getNumpyFromTransform(T_W_ee_vtk)
T_W_ee_approach_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_approach_vtk)
T_W_ee_approach = transformUtils.getNumpyFromTransform(T_W_ee_approach_vtk)
# place the object
force_threshold_magnitude = 50 # shoes are heavy
q_nom = np.array(self._stored_poses_director["Grasping"]["above_table_pre_grasp"])
q_nom = np.array(self._stored_poses_director["left_table"]["above_table_pre_grasp"])
code =self.execute_place_new(T_W_ee, T_W_ee_approach, q_nom=q_nom, use_cartesian_plan=True, force_threshold_magnitude=force_threshold_magnitude)
print("\n\n--- Finished Shoe Manipulation-------\n\n")
self._shoe_manipulation_counter += 1
return code
def retract_from_shoe_rack(self):
"""
Retract from the shoe rack
:return:
:rtype:
"""
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current(distance=0.045)
# back away along gripper x-direction
ee_speed_m_s = 0.05
xyz_goal = [-0.15, 0, 0]  # 15 cm
duration = np.linalg.norm(xyz_goal) / ee_speed_m_s
ee_frame_id = "iiwa_link_ee"
base_frame_id = "base"
expressed_in_frame = ee_frame_id
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
duration=duration,
speed=0.1)
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
self.state.cache['cartesian_traj_result'] = result
speed = self.graspingParams['speed']['fast']
if EXPERIMENT_MODE:
# move to pose
q = self._stored_poses_director["left_table"]["shoe_evaluation_side"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
msg = self.captureRgbdAndCameraTransform()
save_dir = os.path.join(spartanUtils.get_sandbox_dir(), self.state.cache['keypoint_detection_result']['output_dir'], "evaluation")
self.save_RGBD_client.wait_for_server()
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list.append(msg)
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
goal.output_dir = save_dir
self.save_RGBD_client.send_goal(goal)
self.save_RGBD_client.wait_for_result()
self.moveHome(speed=speed)
def run_mug_on_rack_manipulation(self):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
return False
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.MUG_ON_RACK
self.moveHome()
# extract grasp from gripper fingertip pose
result = self.state.cache["category_manipulation_goal"]["result"]
T_W_fingertip = ros_numpy.numpify(result.T_world_gripper_fingertip)
T_W_fingertip_vtk = transformUtils.getTransformFromNumpy(T_W_fingertip)
grasp_data = GraspData.from_gripper_fingertip_frame(T_W_fingertip)
grasp_data.gripper.params["hand_inner_diameter"] = 0.05 # 4
| 0 |
33867677611ceb757f6973eb70368c9f75f3ce92
|
Python
|
cm wide
self.state.grasp_data = grasp_data
self.visualize_grasp(grasp_data)
debug_speed = 10
def vis_function():
vis.updateFrame(T_W_fingertip_vtk, "gripper fingertip frame", scale=0.15, parent=self._vis_container)
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", scale=0.15, parent=self._vis_container)
self.taskRunner.callOnThread(vis_function)
# debugging
print("visualizing grasp")
self.visualize_grasp(grasp_data)
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True, push_in_distance=0.01, ee_speed_m_s=0.1)
T_W_G = self.state.cache['gripper_frame_at_grasp'] # this is set in execute_grasp
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# now move to nominal position for the place
# speed = self.graspingParams["speed"]["nominal"]
speed = self.graspingParams["speed"]["fast"]
# q_nom_left_table = self._stored_poses_director["left_table"]["above_table_pre_grasp"]
q_nom_left_table = self._stored_poses_director["left_table"]["above_table_pre_grasp_right"]
self.robotService.moveToJointPosition(q_nom_left_table,
maxJointDegreesPerSecond=
speed)
# compute some poses
T_goal_obs = ros_numpy.numpify(result.T_goal_obs) # 4 x 4 numpy matrix
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
object_manip = ObjectManipulation(T_goal_object=T_goal_obs_vtk, T_W_G=T_W_G)
object_manip.compute_transforms()
T_W_Gn_vtk = object_manip.T_W_Gn # gripper to world for place pose
T_pre_goal_obs = ros_numpy.numpify(result.T_pre_goal_obs)
T_pre_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_pre_goal_obs)
object_manip_approach = ObjectManipulation(T_goal_object=T_pre_goal_obs_vtk, T_W_G=T_W_G)
object_manip_approach.compute_transforms()
T_W_Gn_approach_vtk = object_manip_approach.T_W_Gn
# now convert these to ee poses
T_W_ee_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_vtk)
T_W_ee = transformUtils.getNumpyFromTransform(T_W_ee_vtk)
T_W_ee_approach_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_approach_vtk)
T_W_ee_approach = transformUtils.getNumpyFromTransform(T_W_ee_approach_vtk)
# execute the place
print("executing place on rack")
return self.execute_place_new(T_W_ee, T_W_ee_approach, q_nom=q_nom_left_table, use_cartesian_plan=True, force_threshold_magnitude=30, ee_speed_m_s=0.1)
def retract_from_mug_rack(self, gripper_open=True):
"""
Move backwards from the mug rack
:return:
:rtype:
"""
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.MUG_ON_RACK
if gripper_open:
self.gripperDriver.send_open_gripper_set_distance_from_current()
xyz_goal = np.array([-0.10, 0, 0])
ee_frame_id = "iiwa_link_ee"
expressed_in_frame = ee_frame_id
cartesian_grasp_speed = self.graspingParams['speed']['cartesian_grasp']
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
speed=cartesian_grasp_speed)
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
# now move to nominal position for the place
speed = self.graspingParams["speed"]["fast"]
super_fast_speed = self.graspingParams["speed"]["super_fast"]
# q_nom_left_table = self._stored_poses_director["left_table"]["above_table_pre_grasp"]
q_nom_left_table = self._stored_poses_director["left_table"]["above_table_pre_grasp_right"]
self.robotService.moveToJointPosition(q_nom_left_table,
maxJointDegreesPerSecond=
speed)
if EXPERIMENT_MODE:
q = self._stored_poses_director["left_table"]["mug_rack_evaluation"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=
speed)
msg = self.captureRgbdAndCameraTransform()
save_dir = os.path.join(spartanUtils.get_sandbox_dir(),
self.state.cache['keypoint_detection_result']['output_dir'], "evaluation")
self.save_RGBD_client.wait_for_server()
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list.append(msg)
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
goal.output_dir = save_dir
self.save_RGBD_client.send_goal(goal)
self.save_RGBD_client.wait_for_result()
self.moveHome(speed=super_fast_speed)
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
# clear the cache to avoid accidentally running this twice
self.state.clear()
self._clear_cache()
def run_mug_shelf_manipulation(self, use_debug_speed=True):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.MUG_ON_SHELF_3D
self.moveHome()
result = self.state.cache['category_manipulation_goal']['result']
print("\n\n---result----\n\n", result)
print("\n\n\n")
T_W_fingertip = ros_numpy.numpify(result.T_world_gripper_fingertip)
T_W_fingertip_vtk = transformUtils.getTransformFromNumpy(T_W_fingertip)
grasp_data = GraspData.from_gripper_fingertip_frame(T_W_fingertip)
grasp_data.gripper.params["hand_inner_diameter"] = result.gripper_width
# rotate grasp frame to align with nominal if we are doing a vertical grasp
force_threshold_magnitude = 30
push_in_distance = 0.0
if result.mug_orientation == "HORIZONTAL":
push_in_distance = -0.005
force_threshold_magnitude = 30
elif result.mug_orientation == "UPRIGHT":
push_in_distance = 0.01
force_threshold_magnitude = 30
# params = self.getParamsForCurrentLocation()
# grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
# grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
self.state.grasp_data = grasp_data
self.visualize_grasp(grasp_data)
def vis_function():
vis.updateFrame(T_W_fingertip_vtk, "gripper fingertip frame", scale=0.15, parent=self._vis_container)
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", scale=0.15, parent=self._vis_container)
self.taskRunner.callOnThread(vis_function)
# debugging
print("visualizing grasp")
self.visualize_grasp(grasp_data)
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True, push_in_distance=push_in_distance, force_threshold_magnitude=force_threshold_magnitude, ee_speed_m_s=0.1)
T_W_G = self.state.cache['gripper_frame_at_grasp'] # this is set in execute_grasp
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# move to above table pre grasp
speed = self.graspingParams["speed"]["fast"]
q = self._stored_poses_director["Grasping"]["above_table_pre_grasp"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=
speed)
q_approach = None
if result.mug_orientation == "HORIZONTAL":
q_nom = self._stored_poses_director["mug"]["horizontal_grasp_nominal"]
q_approach_2 = self._stored_poses_director["mug"]["horizontal_grasp_approach_2"]
self.robotService.moveToJointPosition(q_approach_2,
maxJointDegreesPerSecond=
speed)
elif result.mug_orientation == "UPRIGHT":
q_nom = self._stored_poses_director["mug"]["vertical_grasp_nominal"]
q_approach_1 = self._stored_poses_director["mug"]["vertical_grasp_above_table"]
self.robotService.moveToJointPosition(q_approach_1,
maxJointDegreesPerSecond=
speed)
else:
raise ValueError("unknown mug orientation: %s" %(result.mug_orientation))
# compute some poses
T_goal_obs = ros_numpy.numpify(result.T_goal_obs) # 4 x 4 numpy matrix
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
object_manip = ObjectManipulation(T_goal_object=T_goal_obs_vtk, T_W_G=T_W_G)
object_manip.compute_transforms()
T_W_Gn_vtk = object_manip.T_W_Gn # gripper to world for place pose
T_pre_goal_obs = ros_numpy.numpify(result.T_pre_goal_obs)
T_pre_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_pre_goal_obs)
object_manip_approach = ObjectManipulation(T_goal_object=T_pre_goal_obs_vtk, T_W_G=T_W_G)
object_manip_approach.compute_transforms()
T_W_Gn_approach_vtk = object_manip_approach.T_W_Gn
# now convert these to ee poses
T_W_ee_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_vtk)
T_W_ee = transformUtils.getNumpyFromTransform(T_W_ee_vtk)
T_W_ee_approach_vtk = self.getIiwaLinkEEFrameFromGraspFrame(T_W_Gn_approach_vtk)
T_W_ee_approach = transformUtils.getNumpyFromTransform(T_W_ee_approach_vtk)
# execute the place
print("executing place on shelf")
code = self.execute_place_new(T_W_ee, T_W_ee_approach, q_nom=q_nom, use_cartesian_plan=True, force_threshold_magnitude=30)
return code
def retract_from_mug_shelf(self, gripper_open=True, use_debug_speed=True):
"""
Move backwards from the mug shelf
:return:
:rtype:
"""
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.MUG_ON_SHELF_3D
result = self.state.cache['category_manipulation_goal']['result']
if gripper_open:
if result.mug_orientation == "HORIZONTAL":
self.gripperDriver.sendOpenGripperCommand()
else:
self.gripperDriver.send_open_gripper_set_distance_from_current()
# do different things depending on whether it was horizontal or vertical drop
result = self.state.cache['category_manipulation_goal']['result']
mug_orientation = result.mug_orientation
xyz_goal = np.array([-0.10, 0, 0])
ee_frame_id = "iiwa_link_ee"
expressed_in_frame = ee_frame_id
cartesian_grasp_speed = self.graspingParams['speed']['cartesian_grasp']
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
speed=cartesian_grasp_speed)
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
# now move to nominal position for the place
speed = self.graspingParams["speed"]["fast"]
super_fast_speed = self.graspingParams["speed"]["super_fast"]
if use_debug_speed:
speed = DEBUG_SPEED
if mug_orientation == "UPRIGHT":
q_pose_1 = self._stored_poses_director["mug"]["vertical_grasp_above_table"]
self.robotService.moveToJointPosition(q_pose_1,
maxJointDegreesPerSecond=
super_fast_speed)
elif mug_orientation=="HORIZONTAL":
q_pose_1 = self._stored_poses_director["mug"]["horizontal_grasp_approach"]
self.robotService.moveToJointPosition(q_pose_1,
maxJointDegreesPerSecond=
speed)
q_pose_2 = self._stored_poses_director["Grasping"]["above_table_pre_grasp"]
self.robotService.moveToJointPosition(q_pose_2,
maxJointDegreesPerSecond=
super_fast_speed)
if EXPERIMENT_MODE:
# move to pose
q = self._stored_poses_director["left_table"]["look_at_mug_shelf_2"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=super_fast_speed)
msg = self.captureRgbdAndCameraTransform()
save_dir = os.path.join(spartanUtils.get_sandbox_dir(),
self.state.cache['keypoint_detection_result']['output_dir'], "evaluation")
self.save_RGBD_client.wait_for_server()
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list.append(msg)
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
goal.output_dir = save_dir
self.save_RGBD_client.send_goal(goal)
self.save_RGBD_client.wait_for_result()
super_fast_speed = self.graspingParams["speed"]["super_fast"]
self.moveHome(speed=super_fast_speed)
def run_category_manipulation_pipeline(self):
self._clear_cache()
self.run_keypoint_detection()
self.run_category_manipulation_goal_estimation()
self.run_manipulate_object()
def visualize_poser_result(self):
"""
DEPRECATED (this code is best used from pdc_ros)
Visualize the poser output
"""
# debugging
if self.poser_result is None:
# use the default path for debugging purposes
path_to_poser_output = os.path.join(spartanUtils.get_sandbox_dir(), "poser")
else:
path_to_poser_output = os.path.join(spartanUtils.get_sandbox_dir(), self.poser_result.poser_output_folder)
self._poser_visualizer = PoserVisualizer(path_to_poser_output)
poser_response = self._poser_visualizer.load_poser_response()
self._poser_visualizer.visualize_result(poser_response)
def grasp_best_match(self):
assert self.best_match_result.match_found
best_match_location_msg = self.best_match_result.best_match_location
best_match_location = np.zeros(3)
best_match_location[0] = best_match_location_msg.x
best_match_location[1] = best_match_location_msg.y
best_match_location[2] = best_match_location_msg.z
# check that it is above table
min_pt = np.array([0.4, -0.357198029757, 0.0])
max_pt = np.array([0.822621226311, 0.3723, 0.5])
greater_than_min = (best_match_location > min_pt).all()
less_than_max = (best_match_location < max_pt).all()
if not (greater_than_min and less_than_max):
print "best match location is outside of workspace bounds"
print "best_match_location:", best_match_location
return False
print "requesting Grasp 3D location"
self.grasp_3D_location_request(best_match_location)
result = self.wait_for_grasp_3D_location_result()
print "received Grasp 3D Location Response"
print "result:\n", result
grasp_found = self.processGenerateGraspsResult(result)
if not grasp_found:
print "no grasp found, returning"
return False
print "attempting grasp"
return self.attemptGrasp(self.graspFrame)
def find_best_match_and_grasp_and_stow(self):
# find best match
result = self.findBestBatch()
if not result.match_found:
return False
# attempt grasp best match
grasp_successful = self.grasp_best_match()
if not grasp_successful:
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.moveHome()
print "grasp attempt failed, resetting"
return False
# stow
stow_pose = self.graspingParams["poses"]["hand_to_human_right"]
# stow_pose = self.graspingParams["poses"]["stow_in_bin"]
self.pickupObject(stow=True, stow_pose=stow_pose)
def request_best_match(self):
goal = pdc_ros_msgs.msg.FindBestMatchGoal()
goal.rgbd_with_pose_list = self.list_rgbd_with_pose_msg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
self.find_best_match_client.send_goal(goal)
self.moveHome()
# From: https://www.programcreek.com/python/example/99841/sensor_msgs.msg.PointCloud2
def pointcloud2_to_array(self, cloud_msg):
'''
Converts a rospy PointCloud2 message to a numpy recordarray
Assumes all fields 32 bit floats, and there is no padding.
'''
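# note: np.fromstring is deprecated in newer numpy; np.frombuffer is the
# modern equivalent for this byte-buffer conversion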
dtype_list = [(f.name, np.float32) for f in cloud_msg.fields]
cloud_arr = np.fromstring(cloud_msg.data, dtype_list)
return cloud_arr
# unreachable as written: return np.reshape(cloud_arr, (cloud_msg.height, cloud_msg.width))
def processGenerateGraspsResult(self, result):
"""
Takes the result of spartan_grasp and parses it into a usable form
:param result:
:return:
"""
print "num antipodal grasps = ", len(result.antipodal_grasps)
print "num volume grasps = ", len(result.volume_grasps)
if (len(result.antipodal_grasps) == 0) and (len(result.volume_grasps) == 0):
self.topGrasp = None
self._grasp_found = False
rospy.loginfo("no valid grasps found")
return False
if len(result.antipodal_grasps) > 0:
self._grasp_found = True
grasp_msg = result.antipodal_grasps[0]
print "top grasp was ANTIPODAL"
elif len(result.volume_grasps) > 0:
self._grasp_found = True
grasp_msg = result.volume_grasps[0]
print "top grasp was VOLUME"
self.topGrasp = grasp_msg
rospy.loginfo("-------- top grasp score = %.3f", self.topGrasp.score)
self.graspFrame = spartanUtils.transformFromROSPoseMsg(self.topGrasp.pose.pose)
self.rotateGraspFrameToAlignWithNominal(self.graspFrame)
return True
def make_grasp_data_from_spartan_grasp_result(self, result):
"""
Takes the result of spartan_grasp and parses it into a usable form
:param result:
:return: bool, GraspData
"""
print "num antipodal grasps = ", len(result.antipodal_grasps)
print "num volume grasps = ", len(result.volume_grasps)
if (len(result.antipodal_grasps) == 0) and (len(result.volume_grasps) == 0):
rospy.loginfo("no valid grasps found")
return False, False
if len(result.antipodal_grasps) > 0:
grasp_msg = result.antipodal_grasps[0]
type = "antipodal"
print "top grasp was ANTIPODAL"
elif len(result.volume_grasps) > 0:
grasp_msg = result.volume_grasps[0]
type = "volume"
print "top grasp was VOLUME"
rospy.loginfo("-------- top grasp score = %.3f", grasp_msg.score)
grasp_data = GraspData.from_spartan_grasp(grasp_msg)
grasp_data.data['type'] = type
# rotate the grasp to align with nominal
params = self.getParamsForCurrentLocation()
grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
return True, grasp_data
def getIiwaLinkEEFrameFromGraspFrame(self, graspFrame):
return transformUtils.concatenateTransforms([self.iiwaLinkEEToGraspFrame, graspFrame])
def get_iiwa_link_ee_from_gripper_fingertip_frame(self, T_W__gripper_fingertip):
"""
:param T_W__gripper_fingertip: gripper fingertip to world transform
:return:
"""
return transformUtils.concatenateTransforms([self.T_gripper_fingertip__iiwa_link_ee, T_W__gripper_fingertip])
def moveToFrame(self, graspFrame, speed=None):
if speed is None:
speed = self.config['grasp_speed']
poseStamped = self.makePoseStampedFromGraspFrame(graspFrame)
return self.robotService.moveToCartesianPosition(poseStamped, speed)
def makePoseStampedFromGraspFrame(self, graspFrame):
"""
Make PoseStamped message for the end effector frame from a given grasp frame
:param graspFrame: vtkTransform of the gripper frame
:return : pose of the end-effector for that grasp frame location
:rtype : geometry_msgs/PoseStamped
"""
iiwaLinkEEFrame = self.getIiwaLinkEEFrameFromGraspFrame(graspFrame)
poseDict = spartanUtils.poseFromTransform(iiwaLinkEEFrame)
poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)
poseStamped = geometry_msgs.msg.PoseStamped()
poseStamped.pose = poseMsg
poseStamped.header.frame_id = "base"
return poseStamped
def make_ee_pose_stamped_from_grasp(self, T_W_gripper_fingertip):
"""
Make PoseStamped message for the end effector frame from a given grasp frame.
:param T_W_gripper_fingertip: world-frame pose of the gripper fingertips; the fixed fingertip-to-ee offset is applied to recover the end-effector pose
:return : pose of the end-effector for that grasp frame location
:rtype : geometry_msgs/PoseStamped
"""
iiwaLinkEEFrame = self.get_iiwa_link_ee_from_gripper_fingertip_frame(T_W_gripper_fingertip)
poseDict = spartanUtils.poseFromTransform(iiwaLinkEEFrame)
poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)
poseStamped = geometry_msgs.msg.PoseStamped()
poseStamped.pose = poseMsg
poseStamped.header.frame_id = "base"
return poseStamped
def execute_grasp(self, grasp_data=None, close_gripper=True, use_cartesian_plan=True, stop_at_pre_grasp=False, push_in_distance=None, use_debug_speed=False, force_threshold_magnitude=None, ee_speed_m_s=0.05):
"""
Moves to pre-grasp frame, then grasp frame
attempts to close gripper if `close_gripper=True` was passed in
:return: bool (whether or not grasp was successful)
"""
if grasp_data is None:
grasp_data = self.state.grasp_data
if push_in_distance is None:
push_in_distance = self.graspingParams['grasp_push_in_distance']
gripper_width = grasp_data.grasp_inner_diameter
if gripper_width is not None:
gripper_driver_width = gripper_width + self.graspingParams['gripper_width_offset']
self.gripperDriver.sendGripperCommand(gripper_driver_width, force=20.0)
else:
self.gripperDriver.send_open_gripper_set_distance_from_current()
rospy.sleep(0.5)  # wait 0.5 s for the gripper to move
# compute the pre-grasp frame
pre_grasp_distance = self.graspingParams['pre_grasp_distance']
pre_grasp_frame_gripper = grasp_data.compute_pre_grasp_frame(distance=pre_grasp_distance)
pre_grasp_ee_pose_stamped = self.makePoseStampedFromGraspFrame(pre_grasp_frame_gripper)
# safety check
is_safe = (GraspData.grasp_frame_safety_check(grasp_data.grasp_frame) and GraspData.grasp_frame_safety_check(pre_grasp_frame_gripper))
if not is_safe:
self.state.set_status("SAFETY_CHECK_FAILED")
return False
# run the ik for moving to pre-grasp location
graspLocationData = self.graspingParams[self.state.graspingLocation]
above_table_pre_grasp = graspLocationData['poses']['above_table_pre_grasp']
pre_grasp_ik_response = self.robotService.runIK(pre_grasp_ee_pose_stamped,
seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
pre_grasp_pose = pre_grasp_ik_response.joint_state.position
if not pre_grasp_ik_response.success:
rospy.loginfo("pre grasp pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# run the ik for moving to grasp location
# for now just do IK, otherwise use cartesian space plan with force guards
grasp_frame_ee_pose_stamped = self.makePoseStampedFromGraspFrame(grasp_data.grasp_frame)
grasp_ik_response = self.robotService.runIK(grasp_frame_ee_pose_stamped,
seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
grasp_pose = grasp_ik_response.joint_state.position
if not grasp_ik_response.success:
rospy.loginfo("pre grasp pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# store for later use
self.state.cache['grasp_ik_response'] = grasp_ik_response
self.state.cache['pre_grasp_ik_response'] = pre_grasp_ik_response
# move to pre-grasp position
# we do this using a position trajectory
print "moving to pre-grasp"
pre_grasp_speed = self.graspingParams['speed']['pre_grasp']
#### debugging
speed = pre_grasp_speed
if use_debug_speed:
speed = DEBUG_SPEED
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
speed)
self.state.set_status("PRE_GRASP")
print "at pre-grasp pose"
if stop_at_pre_grasp:
return
if use_cartesian_plan:
# move to grasp position using compliant cartesian plan
move_forward_distance = pre_grasp_distance + push_in_distance
print "move_forward_distance", move_forward_distance
xyz_goal = move_forward_distance * np.array([1, 0, 0])
ee_frame_id = "iiwa_link_ee"
expressed_in_frame = ee_frame_id
# cartesian_grasp_speed = self.graspingParams['speed']['cartesian_grasp']
cartesian_grasp_speed = ee_speed_m_s
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
speed=cartesian_grasp_speed)
# add force guards
# -z (gripper) direction in frame iiwa_link_ee,
if force_threshold_magnitude is None:
force_threshold_magnitude = self.graspingParams['force_threshold_magnitude']
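# a contact force above this magnitude, pushing back along the gripper
# approach axis, ends the cartesian move early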
force_vector = force_threshold_magnitude * np.array([-1, 0, 0])
force_guard = control_utils.make_force_guard_msg(force_vector)
cartesian_traj_goal.force_guard.append(force_guard)
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
grasp_data.data['cartesian_trajectory_result'] = result
print "Cartesian Trajectory Result\n", result
else:
# move to grasp pose using standard IK
speed = self.graspingParams['speed']['grasp']
if use_debug_speed:
speed = DEBUG_SPEED
self.robotService.moveToJointPosition(grasp_pose,
maxJointDegreesPerSecond=
speed)
# record current location of gripper (in world frame)
# before closing the gripper
pos, quat = self.get_transform("iiwa_link_ee", "base")
T_world_ee = transformUtils.transformFromPose(pos, quat)
T_world_grasp = transformUtils.concatenateTransforms([self.graspToIiwaLinkEE, T_world_ee])
self.state.cache['gripper_frame_at_grasp'] = T_world_grasp
has_object = False
if close_gripper:
print "closing gripper"
has_object = self.gripperDriver.closeGripper()
if has_object:
self.state.set_status("OBJECT_IN_GRIPPER")
print "object in gripper"
else:
self.state.set_status("GRASP_FAILED")
print "grasp failed"
return has_object
def execute_place(self, grasp_data=None, use_cartesian_plan=True):
if grasp_data is None:
grasp_data = self.state.grasp_data
# compute the pre-grasp frame
pre_grasp_distance = self.graspingParams['pre_grasp_distance']
pre_grasp_frame_gripper = grasp_data.compute_pre_grasp_frame(distance=pre_grasp_distance)
pre_grasp_ee_pose_stamped = self.makePoseStampedFromGraspFrame(pre_grasp_frame_gripper)
# run the ik for moving to pre-grasp location
graspLocationData = self.graspingParams[self.state.graspingLocation]
above_table_pre_grasp = graspLocationData['poses']['above_table_pre_grasp']
pre_grasp_ik_response = self.robotService.runIK(pre_grasp_ee_pose_stamped,
seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
pre_grasp_pose = pre_grasp_ik_response.joint_state.position
if not pre_grasp_ik_response.success:
rospy.loginfo("pre grasp pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# run the ik for moving to grasp location
# for now just do IK, otherwise use cartesian space plan with force guards
grasp_frame_ee_pose_stamped = self.makePoseStampedFromGraspFrame(grasp_data.grasp_frame)
grasp_ik_response = self.robotService.runIK(grasp_frame_ee_pose_stamped,
seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
grasp_pose = grasp_ik_response.joint_state.position
if not grasp_ik_response.success:
rospy.loginfo("pre grasp pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# store for later use
self.state.cache['grasp_ik_response'] = grasp_ik_response
self.state.cache['pre_grasp_ik_response'] = pre_grasp_ik_response
# move to pre-grasp position
# we do this using a position trajectory
print "moving to pre-grasp"
pre_grasp_speed = self.graspingParams['speed']['pre_grasp']
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pre_grasp_speed)
self.state.set_status("PRE_GRASP")
print "at pre-grasp pose"
if use_cartesian_plan:
# move to grasp position using compliant cartesian plan
push_distance = self.graspingParams['grasp_push_in_distance']
move_forward_distance = pre_grasp_distance + push_distance
print "move_forward_distance", move_forward_distance
xyz_goal = move_forward_distance * np.array([1, 0, 0])
ee_frame_id = "iiwa_link_ee"
expressed_in_frame = ee_frame_id
cartesian_grasp_speed = self.graspingParams['speed']['cartesian_grasp']
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
speed=cartesian_grasp_speed)
# add force guards
# -z (gripper) direction in frame iiwa_link_ee,
force_magnitude = self.graspingParams['force_threshold_magnitude']
force_vector = force_magnitude * np.array([-1, 0, 0])
force_guard = control_utils.make_force_guard_msg(force_vector)
cartesian_traj_goal.force_guard.append(force_guard)
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
grasp_data.data['cartesian_trajectory_result'] = result
print "Cartesian Trajectory Result\n", result
else:
# move to grasp pose using standard IK
speed = self.graspingParams['speed']['grasp']
self.robotService.moveToJointPosition(grasp_pose,
maxJointDegreesPerSecond=
speed)
self.gripperDriver.send_open_gripper_set_distance_from_current()
return True
def execute_place_new(self, T_W_ee, T_W_ee_approach, q_nom=None, use_cartesian_plan=False, use_debug_speed=False, force_threshold_magnitude=10, ee_speed_m_s=0.05):
"""
:param T_W_ee: ee location for place
:type T_W_ee:
:param T_W_ee_approach: ee location for approach
:type T_W_ee_approach:
:param q_nom: pose for use as nominal and seed for ik
:type q_nom:
:param use_cartesian_plan: whether or not to use the cartesian plan
:type use_cartesian_plan:
:return:
:rtype:
"""
# safety check
is_safe = (GraspData.grasp_frame_safety_check(T_W_ee) and GraspData.grasp_frame_safety_check(T_W_ee_approach))
if not is_safe:
self.state.set_status("SAFETY_CHECK_FAILED")
return False
# run the ik for moving to pre-grasp location
debug_speed = 10
if q_nom is None:
graspLocationData = self.graspingParams[self.state.graspingLocation]
q_nom = graspLocationData['poses']['above_table_pre_grasp']
T_W_ee_vtk = transformUtils.getTransformFromNumpy(T_W_ee)
T_W_ee_approach_vtk = transformUtils.getTransformFromNumpy(T_W_ee_approach)
# pose stamped
frame_id = "base"
T_W_ee_approach_stamped = geometry_msgs.msg.PoseStamped()
T_W_ee_approach_stamped.pose = ros_numpy.msgify(geometry_msgs.msg.Pose,
T_W_ee_approach)
T_W_ee_approach_stamped.header.frame_id = frame_id
T_W_ee_approach_stamped.header.stamp = rospy.Time.now()
print T_W_ee_approach_stamped
pre_place_ik_response = self.robotService.runIK(T_W_ee_approach_stamped,
seedPose=q_nom,
nominalPose=q_nom)
pre_place_pose = pre_place_ik_response.joint_state.position
self.state.cache["pre_place_ik_response"] = pre_place_ik_response
if not pre_place_ik_response.success:
rospy.loginfo("pre place pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# run the ik for moving to grasp location
frame_id = "base"
T_W_ee_stamped = geometry_msgs.msg.PoseStamped()
T_W_ee_stamped.pose = ros_numpy.msgify(geometry_msgs.msg.Pose,
T_W_ee)
T_W_ee_stamped.header.frame_id = frame_id
T_W_ee_stamped.header.stamp = rospy.Time.now()
# for now just do IK, otherwise use cartesian space plan with force guards
place_ik_response = self.robotService.runIK(T_W_ee_stamped,
seedPose=q_nom,
nominalPose=q_nom)
place_pose = place_ik_response.joint_state.position
if not place_ik_response.success:
rospy.loginfo("place pose ik failed, returning")
self.state.set_status_ik_failed()
self.state.print_status()
return False
# store for later use
self.state.cache['place_ik_response'] = place_ik_response
# move to pre-grasp position
# we do this using a position trajectory
print "moving to approach pose"
# pre_grasp_speed = self.graspingParams['speed']['pre_grasp']
speed = self.graspingParams['speed']['grasp']
if use_debug_speed:
speed = debug_speed
self.robotService.moveToJointPosition(pre_place_pose,
maxJointDegreesPerSecond=
speed)
self.state.set_status("PRE_GRASP")
print "at approach pose"
if use_cartesian_plan:
# move to grasp position using compliant cartesian plan
# for now doesn't deal with orientations
xyz_approach = np.array(T_W_ee_approach_vtk.GetPosition())
xyz_place = np.array(T_W_ee_vtk.GetPosition())
distance = np.linalg.norm(xyz_place - xyz_approach)
duration = distance/ee_speed_m_s
xyz_goal = xyz_place
ee_frame_id = "iiwa_link_ee"
base_frame_id = "base"
expressed_in_frame = base_frame_id
cartesian_grasp_speed = self.graspingParams['speed']['cartesian_grasp']
cartesian_traj_goal = \
control_utils.make_cartesian_trajectory_goal(xyz_goal,
ee_frame_id,
expressed_in_frame,
duration=duration)
# add force guards
# x_axis in frame iiwa_link_ee,
force_vector = force_threshold_magnitude * np.array([-1, 0, 0])
force_guard = control_utils.make_force_guard_msg(force_vector)
cartesian_traj_goal.force_guard.append(force_guard)
# z_axis in frame iiwa_link_ee
force_vector = force_threshold_magnitude * np.array([0, 0, 1])
force_guard = control_utils.make_force_guard_msg(force_vector)
cartesian_traj_goal.force_guard.append(force_guard)
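# with both guards active, contact along either the ee x-axis or the ee
# z-axis stops the place motion early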
action_client = self.robotService.cartesian_trajectory_action_client
action_client.send_goal(cartesian_traj_goal)
# wait for result
action_client.wait_for_result()
result = action_client.get_result()
self.state.cache['cartesian_traj_result'] = result
print "Cartesian Trajectory Result\n", result
else:
# move to grasp pose using standard IK
speed = self.graspingParams['speed']['grasp']
self.robotService.moveToJointPosition(place_pose,
maxJointDegreesPerSecond=
speed)
# now back off
# self.gripperDriver.send_open_gripper_set_distance_from_current()
return True
def attemptGrasp(self, graspFrame):
"""
Attempt a grasp
return: boolean if it was successful or not
"""
self._clear_cache()
self._cache["grasp_frame"] = graspFrame
preGraspFrame = transformUtils.concatenateTransforms([self.preGraspToGraspTransform, graspFrame])  # use the passed-in graspFrame, not the stale attribute
graspLocationData = self.graspingParams[self.state.graspingLocation]
above_table_pre_grasp = graspLocationData['poses']['above_table_pre_grasp']
preGraspFramePoseStamped = self.makePoseStampedFromGraspFrame(preGraspFrame)
preGrasp_ik_response = self.robotService.runIK(preGraspFramePoseStamped, seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
if not preGrasp_ik_response.success:
rospy.loginfo("pre grasp pose ik failed, returning")
return False
graspFramePoseStamped = self.makePoseStampedFromGraspFrame(graspFrame)
preGraspPose = preGrasp_ik_response.joint_state.position
grasp_ik_response = self.robotService.runIK(graspFramePoseStamped, seedPose=preGraspPose,
nominalPose=preGraspPose)
self._cache['grasp_ik_response'] = grasp_ik_response
self._cache['pre_grasp_ik_response'] = preGrasp_ik_response
if not grasp_ik_response.success:
rospy.loginfo("grasp pose not reachable, returning")
return False
graspPose = grasp_ik_response.joint_state.position
# store for future use
self.preGraspFrame = preGraspFrame
self.graspFrame = graspFrame
self.gripperDriver.send_open_gripper_set_distance_from_current()
rospy.sleep(0.5) # wait for the gripper to open
self.robotService.moveToJointPosition(preGraspPose,
maxJointDegreesPerSecond=self.graspingParams['speed']['pre_grasp'])
self.robotService.moveToJointPosition(graspPose, maxJointDegreesPerSecond=self.graspingParams['speed']['grasp'])
objectInGripper = self.gripperDriver.closeGripper()
return objectInGripper
def vtkFrameToPoseMsg(self, vtkFrame):
poseDict = spartanUtils.poseFromTransform(vtkFrame)
poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)
poseStamped = geometry_msgs.msg.PoseStamped()
poseStamped.pose = poseMsg
poseStamped.header.frame_id = "base"
return poseStamped
"""
Moves the gripper up 15cm then moves home
"""
def pickupObject(self, stow=True, stow_pose=None):
endEffectorFrame = self.tfBuffer.lookup_transform(self.config['base_frame_id'],
self.config['end_effector_frame_id'], rospy.Time(0))
eeFrameVtk = spartanUtils.transformFromROSTransformMsg(endEffectorFrame.transform)
eeFrameVtk.PostMultiply()
eeFrameVtk.Translate(0, 0, self.config['pick_up_distance'])
vis.updateFrame(eeFrameVtk, 'pickup frame')
self._cache['eeFrameVtk'] = eeFrameVtk
self._cache['endEffectorFrame'] = endEffectorFrame
poseStamped = self.vtkFrameToPoseMsg(eeFrameVtk)
speed = 10 # joint degrees per second
params = self.getParamsForCurrentLocation()
above_table_pre_grasp = params['poses']['above_table_pre_grasp']
ik_response = self.robotService.runIK(poseStamped, seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
if ik_response.success:
self.robotService.moveToJointPosition(ik_response.joint_state.position,
maxJointDegreesPerSecond=self.graspingParams['speed']['slow'])
if stow_pose is None:
stow_pose = self.getStowPose()
# move to above_table_pre_grasp
# self.robotService.moveToJointPosition(above_table_pre_grasp, maxJointDegreesPerSecond=self.graspingParams['speed']['stow'])
# move to stow_pose
if stow:
self.robotService.moveToJointPosition(stow_pose,
maxJointDegreesPerSecond=self.graspingParams['speed']['stow'])
# release object
self.gripperDriver.send_open_gripper_set_distance_from_current()
rospy.sleep(0.5)
# move Home
self.moveHome()
def pickup_object(self):
"""
Just moves to pre-grasp frame
:return:
"""
if "pre_grasp_ik_response" not in self.state.cache:
return False
pre_grasp_ik_response = self.state.cache['pre_grasp_ik_response']
pre_grasp_pose = pre_grasp_ik_response.joint_state.position
pre_grasp_speed = self.graspingParams['speed']['stow']
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pre_grasp_speed)
def pickup_object_and_reorient_on_table(self):
"""
Places the object back on the table in a random orientation
Relies on variables in self._cache being set from when we picked up the object
:return:
"""
def set_position(t, pos):
_, quat = transformUtils.poseFromTransform(t)
return transformUtils.transformFromPose(pos, quat)
speed = self.config["object_interaction"]["speed"]
pick_up_distance = self.config["object_interaction"]["pickup_distance"]
drop_distance_above_grasp = self.config["object_interaction"]["drop_distance_above_grasp"]
rotate_speed = self.config["object_interaction"]["rotate_speed"]
drop_location = self.config["object_interaction"]["drop_location"] # z coordinate is overwritten later
endEffectorFrame = self.tfBuffer.lookup_transform(self.config['base_frame_id'],
self.config['end_effector_frame_id'], rospy.Time(0))
grasp_ee_frame = spartanUtils.transformFromROSTransformMsg(endEffectorFrame.transform)
# the frame of the end-effector after we have picked up the object
pickup_ee_frame_vtk = transformUtils.copyFrame(grasp_ee_frame)
pickup_ee_frame_vtk.PostMultiply()
pickup_ee_frame_vtk.Translate(0, 0, pick_up_distance)
vis.updateFrame(pickup_ee_frame_vtk, 'pickup frame', scale=0.15)
self._cache['grasped_ee_frame'] = endEffectorFrame
self._cache['pickup_ee_frame_vtk'] = pickup_ee_frame_vtk
poseStamped = self.vtkFrameToPoseMsg(pickup_ee_frame_vtk)
speed = 10  # joint degrees per second (note: overrides the config speed read above)
params = self.getParamsForCurrentLocation()
above_table_pre_grasp = params['poses']['above_table_pre_grasp']
pickup_ik_response = self.robotService.runIK(poseStamped, seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
# compute the drop frame location
# This is done by rotating about the x-axis of the pickup frame by a random
# amount in [45, 90] and then just releasing
rotate_x_angle = random.uniform(45, 90)
# if random.random() < 0.5:
# rotate_x_angle *= -1
pre_drop_frame = transformUtils.copyFrame(pickup_ee_frame_vtk)
pre_drop_frame.PreMultiply()
pre_drop_frame.RotateX(rotate_x_angle)
pre_drop_frame_pos, _ = transformUtils.poseFromTransform(pre_drop_frame)
pre_drop_frame_pos[0:2] = drop_location[0:2]
pre_drop_frame = set_position(pre_drop_frame, pre_drop_frame_pos)
grasp_ee_height = grasp_ee_frame.GetPosition()[2]
drop_frame_pos = copy.copy(pre_drop_frame_pos)
drop_frame_pos[2] = grasp_ee_height + drop_distance_above_grasp
print "drop_frame_pos", drop_frame_pos
drop_frame = transformUtils.copyFrame(pre_drop_frame)
drop_frame = set_position(drop_frame, drop_frame_pos)
vis.updateFrame(pre_drop_frame, "pre drop frame", scale=0.15)
vis.updateFrame(drop_frame, "drop frame", scale=0.15)
# run IK
pre_drop_frame_pose_stamped = self.vtkFrameToPoseMsg(pre_drop_frame)
pre_drop_ik_response = self.robotService.runIK(pre_drop_frame_pose_stamped, seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
drop_frame_pose_stamped = self.vtkFrameToPoseMsg(drop_frame)
drop_ik_response = self.robotService.runIK(drop_frame_pose_stamped, seedPose=above_table_pre_grasp,
nominalPose=above_table_pre_grasp)
if pickup_ik_response.success and pre_drop_ik_response.success and drop_ik_response.success:
# pickup object
self.robotService.moveToJointPosition(pickup_ik_response.joint_state.position,
maxJointDegreesPerSecond=speed)
# move to pre-drop
self.robotService.moveToJointPosition(pre_drop_ik_response.joint_state.position,
maxJointDegreesPerSecond=rotate_speed)
# move to drop location
self.robotService.moveToJointPosition(drop_ik_response.joint_state.position,
maxJointDegreesPerSecond=speed)
self.gripperDriver.send_open_gripper_set_distance_from_current()
rospy.sleep(0.5)
# move to pre-drop
self.robotService.moveToJointPosition(pre_drop_ik_response.joint_state.position,
maxJointDegreesPerSecond=rotate_speed)
self.moveHome()
else:
print "ik failed"
return False
return True
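# A minimal sketch (illustrative names and numbers only) of the frame math
# used above, assuming director's vtkTransform semantics: PreMultiply()
# makes subsequent operations act in the frame's own axes, and set_position()
# keeps the orientation while overwriting the translation.
#
#   frame = transformUtils.copyFrame(pickup_ee_frame_vtk)
#   frame.PreMultiply()
#   frame.RotateX(60)                 # rotate about the frame's own x-axis
#   pos, _ = transformUtils.poseFromTransform(frame)
#   pos[0:2] = [0.5, 0.0]             # hypothetical drop x/y
#   frame = set_position(frame, pos)  # same orientation, new position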
def planGraspAndPickupObject(self, stow=True):
self.collectSensorData()
self.requestGrasp()
self.moveHome()
result = self.waitForGenerateGraspsResult()
graspFound = self.processGenerateGraspsResult(result)
if not graspFound:
rospy.loginfo("no grasp found, returning")
return False
graspSuccessful = self.attemptGrasp(self.graspFrame)
if not graspSuccessful:
rospy.loginfo("grasp not successful returning")
return False
self.pickupObject(stow)
def graspAndStowObject(self):
graspSuccessful = self.attemptGrasp(self.graspFrame)
if not graspSuccessful:
rospy.loginfo("grasp not successful returning")
return False
stow = True
self.pickupObject(stow)
def askForCaptureScene(self):
"""
Waits for the capture_scene service provided by fusion_server, then
calls it. This only collects fusion data without performing the fusion
itself, so it's fast. See fusion_server for documentation.
"""
rospy.wait_for_service('capture_scene')
print "Found it!, starting capture..."
try:
capture_scene = rospy.ServiceProxy('capture_scene', fusion_server.srv.CaptureScene)
resp = capture_scene()
print "bag_filepath = %s" % resp.bag_filepath
rospy.loginfo("bag_filepath = %s", resp.bag_filepath)
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def interact_with_object(self):
"""
Runs one iteration of picking up the object re-orienting it
and then placing it back on the table
"""
self.collectSensorData()
self.moveHome()
self.requestGrasp()
result = self.waitForGenerateGraspsResult()
graspFound = self.processGenerateGraspsResult(result)
if not graspFound:
print "no grasp found"
return False
grasp_successful = self.attemptGrasp(self.graspFrame)
if not grasp_successful:
print "grasp attemp was not successful"
return False
else:
print "grasped object"
reoriented_object = self.pickup_object_and_reorient_on_table()
if not reoriented_object:
print "didn't manage to reorient object"
return False
return True
def interactAndCollectFusionDataLoop(self, num_interactions):
"""
Attempts to pickup the object and move it around
:param num_interactions:
:return:
"""
for i in range(num_interactions):
success = self.interact_with_object()
if not success:
print "Human, please go move the object? \n"
print "If you don't want to keep doing this,"
print "then go implement a 'smack-the-object' primitive."
# in future:
# self.smackObject()
rospy.sleep(4.0)
rospy.sleep(1.0)
self.askForCaptureScene()
def testMoveToFrame(self):
pos = [0.51148583, 0.0152224, 0.50182436]
quat = [0.68751512, 0.15384615, 0.69882778, -0.12366916]
targetFrame = transformUtils.transformFromPose(pos, quat)
poseDict = spartanUtils.poseFromTransform(targetFrame)
poseMsg = rosUtils.ROSPoseMsgFromPose(poseDict)
poseStamped = geometry_msgs.msg.PoseStamped()
poseStamped.pose = poseMsg
poseStamped.header.frame_id = "base"
self.poseStamped = poseStamped
self.robotService.moveToCartesianPosition(poseStamped, 30)
def showGraspFrame(self):
vis.updateFrame(self.graspFrame, 'grasp frame', scale=0.15)
vis.updateFrame(self.getIiwaLinkEEFrameFromGraspFrame(self.graspFrame), 'iiwa_link_ee_grasp_frame', scale=0.15)
def showGripperFrame(self):
iiwaLinkEE = self.robotSystem.robotStateModel.getLinkFrame('iiwa_link_ee')
gripperFrame = transformUtils.concatenateTransforms([self.graspToIiwaLinkEE, iiwaLinkEE])
vis.updateFrame(gripperFrame, 'Gripper Frame', scale=0.15)
def show_gripper_fingertip_frame(self):
iiwaLinkEE = self.robotSystem.robotStateModel.getLinkFrame('iiwa_link_ee')
gripperFrame = transformUtils.concatenateTransforms([self.gripper_fingertip_to_iiwa_link_ee, iiwaLinkEE])
vis.updateFrame(gripperFrame, 'Gripper Fingertip Frame', scale=0.15)
def getParamsForCurrentLocation(self):
return self.graspingParams[self.state.graspingLocation]
def rotateGraspFrameToAlignWithNominal(self, graspFrame):
"""
Rotate the grasp frame to align with the nominal direction. In this case we want
the ZAxis of the grasp to be aligned with (1,0,0) in world frame.
If it's not aligned rotate it by 180 degrees about the x-axis of the grasp
:param graspFrame:
:return:
"""
graspFrameZAxis = graspFrame.TransformVector(0, 0, 1)
params = self.getParamsForCurrentLocation()
graspNominalDirection = params['grasp']['grasp_nominal_direction']
if (np.dot(graspFrameZAxis, graspNominalDirection) < 0):
graspFrame.PreMultiply()
graspFrame.RotateX(180)
def saveSensorDataToBagFile(self, pointCloudListMsg=None, filename=None, overwrite=True):
"""
Save sensor data to a bag file
"""
if pointCloudListMsg is None:
return
if filename is None:
filename = os.path.join(spartanUtils.get_sandbox_dir(), "rosbag", 'grasp_sensor_data_%s.bag' %(spartanUtils.get_current_time_unique_name()))
if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
if overwrite and os.path.isfile(filename):
os.remove(filename)
bag = rosbag.Bag(filename, 'w')
bag.write('data', pointCloudListMsg)
bag.close()
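# Example usage (hypothetical filename; any point cloud list message works):
#
#   self.saveSensorDataToBagFile(
#       pointCloudListMsg=self.pointCloudListMsg,
#       filename='/tmp/grasp_sensor_data.bag')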
def requestGrasp(self, pointCloudListMsg=None):
"""
Requests a grasp from the SpartanGrasp ROS service
Doesn't collect new sensor data
"""
# request the grasp via a ROS Action
if pointCloudListMsg is None:
pointCloudListMsg = self.pointCloudListMsg
rospy.loginfo("waiting for spartan grasp server")
self.generate_grasps_client.wait_for_server()
rospy.loginfo("requsting grasps spartan grasp server")
params = self.getParamsForCurrentLocation()
goal = spartan_grasp_msgs.msg.GenerateGraspsFromPointCloudListGoal()
goal.point_clouds = pointCloudListMsg # use the local variable so a caller-supplied message is respected
if 'grasp_volume' in params:
node = params['grasp_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.grasp_volume.append(rectangle)
if 'collision_volume' in params:
node = params['collision_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.collision_volume.append(rectangle)
if 'collision_objects' in params:
for key, val in params['collision_objects'].iteritems():
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(val)
goal.params.collision_objects.append(rectangle)
self.generate_grasps_client.send_goal(goal)
def call_spartan_grasp(self):
"""
More descriptively named wrapper around requestGrasp()
:return:
"""
self.requestGrasp()
def waitForGenerateGraspsResult(self):
rospy.loginfo("waiting for result")
self.generate_grasps_client.wait_for_result()
result = self.generate_grasps_client.get_result()
self.generate_grasps_result = result
rospy.loginfo("received result")
return result
def wait_for_grasp_3D_location_result(self):
"""
Waits for the result of the Grasp3DLocation action
:return:
"""
rospy.loginfo("waiting for result")
self.grasp_3D_location_client.wait_for_result()
result = self.grasp_3D_location_client.get_result()
self.grasp_3D_location_result = result # debugging
rospy.loginfo("received result")
return result
def request_grasp_3D_location(self, pointCloudListMsg=None, grasp_point=None):
"""
Requests a grasp3DLocation from the SpartanGrasp ROS service
Doesn't collect new sensor data
"""
# request the grasp via a ROS Action
if pointCloudListMsg is None:
pointCloudListMsg = self.pointCloudListMsg
rospy.loginfo("waiting for spartan grasp server")
self.grasp_3D_location_client.wait_for_server()
rospy.loginfo("requsting grasps spartan grasp server")
params = self.getParamsForCurrentLocation()
goal = spartan_grasp_msgs.msg.Grasp3DLocationGoal()
if grasp_point is None:
grasp_point = self.get_clicked_point()
goal.grasp_point = grasp_point
goal.point_clouds = pointCloudListMsg
if 'grasp_volume' in params:
node = params['grasp_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.grasp_volume.append(rectangle)
if 'collision_volume' in params:
node = params['collision_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.collision_volume.append(rectangle)
if 'collision_objects' in params:
for key, val in params['collision_objects'].iteritems():
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(val)
goal.params.collision_objects.append(rectangle)
self.grasp_3D_location_client.send_goal(goal)
def request_spartan_grasp(self, clear_state=True):
"""
- collect sensor data
- send request to spartan grasp
:return: bool, GraspData
"""
self.moveHome()
self.collectSensorData()
self.moveHome()
self.requestGrasp()
result = self.waitForGenerateGraspsResult()
grasp_found, grasp_data = self.make_grasp_data_from_spartan_grasp_result(result)
if clear_state:
self.state.clear()
if grasp_found:
self.state.set_status("GRASP_FOUND")
self.state.grasp_data = grasp_data
else:
self.state.set_status("NO_GRASP_FOUND")
if grasp_found and self.debugMode:
# visualize the grasp frame
self.visualize_grasp(grasp_data)
return grasp_found, grasp_data
def grasp_3D_location_request(self, grasp_point, pointCloudListMsg=None):
"""
Sends a request to grasp a specific 3D location
:param grasp_point: numpy array or list of length 3
"""
params = self.getParamsForCurrentLocation()
goal = spartan_grasp_msgs.msg.Grasp3DLocationGoal()
if pointCloudListMsg is None:
pointCloudListMsg = self.pointCloudListMsg
goal.point_clouds = pointCloudListMsg
goal.grasp_point.x = grasp_point[0]
goal.grasp_point.y = grasp_point[1]
goal.grasp_point.z = grasp_point[2]
if 'grasp_volume' in params:
node = params['grasp_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.grasp_volume.append(rectangle)
if 'collision_volume' in params:
node = params['collision_volume']
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(node)
goal.params.collision_volume.append(rectangle)
if 'collision_objects' in params:
for key, val in params['collision_objects'].iteritems():
rectangle = GraspSupervisor.rectangleMessageFromYamlNode(val)
goal.params.collision_objects.append(rectangle)
self.grasp_3D_location_client.send_goal(goal)
def grasp_3D_location(self):
"""
Runs the grasping_3D_location pipeline
1. Checks to make sure there is a clicked_point
2. Collects sensor data
3. Sends off the request to spartan_grasp server
:return: None
"""
self.get_clicked_point()
self.collectSensorData()
self.request_grasp_3D_location()
self.moveHome()
result = self.wait_for_grasp_3D_location_result()
grasp_found = self.processGenerateGraspsResult(result)
def visualize_grasp(self, grasp_data):
stamp = rospy.Time.now()
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", parent=self._vis_container,
scale=0.15)
point_cloud_msg = None
if 'point_cloud_msg' in grasp_data.data:
point_cloud_msg = grasp_data.data['point_cloud_msg']
# publish grasp to world transform
pose = director_utils.poseFromTransform(grasp_data.grasp_frame)
transform_msg = rosUtils.ROSTransformMsgFromPose(pose)
ts = geometry_msgs.msg.TransformStamped()
ts.header.stamp = stamp
ts.header.frame_id = self.config["base_frame_id"]
frame_id = "grasp_frame"
ts.child_frame_id = frame_id
ts.transform = transform_msg
# use the gripper stored in the grasp data if it exists
gripper = grasp_data.gripper
if gripper is None:
gripper = self._gripper
marker_array = gripper.make_rviz_visualization_msg(frame_id, stamp)
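# publish repeatedly so rviz and the tf tree reliably receive the messages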
for i in xrange(0, 5):
if point_cloud_msg is not None:
self.grasp_pointcloud_publisher.publish(point_cloud_msg)
self.rviz_marker_array_publisher.publish(marker_array)
self.tfBroadcaster.sendTransform(ts)
rospy.sleep(0.02)
def get_ggcnn_grasp(self):
"""
Looks up the ggcnn grasp frame from the tf server
Also need to think about gripper width etc.
:return: tuple (bool, dict)
:rtype:
"""
# just do a transform lookup
return_data = dict()
self.state.clear()
try:
ggcnn_grasp_frame_camera_axes = self.tfBuffer.lookup_transform(self.config["base_frame_id"],
self.ggcnn_grasp_frame_camera_axes_id,
rospy.Time.now(), rospy.Duration(2.0))
except Exception as e:
rospy.loginfo("Unable to get ggcnn grasp frame from tf, returning")
print(e)
return False, return_data
return_data['ggcnn_grasp_frame_camera_axes'] = ggcnn_grasp_frame_camera_axes
# make grasp object
T_W_GC = director_utils.transformFromROSTransformMsg(ggcnn_grasp_frame_camera_axes.transform)
grasp_data = GraspData.from_ggcnn_grasp_frame_camera_axes(T_W_GC)
# get the pointcloud associated with this grasp
point_cloud_msg = self.pointCloudSubscriber.waitForNextMessage()
grasp_data.data['point_cloud_msg'] = point_cloud_msg
# rotate the grasp to align with nominal
params = self.getParamsForCurrentLocation()
grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
self.state.grasp_data = grasp_data
return_data['grasp_data'] = grasp_data
if self.debugMode:
# visualize the grasp frame
self.visualize_grasp(grasp_data)
return True, return_data
def start_bagging(self):
print "Waiting for 'start_bagging_fusion_data' service..."
rospy.wait_for_service('start_bagging_fusion_data')
print "Found it!, starting bagging..."
try:
start_bagging_fusion_data = rospy.ServiceProxy('start_bagging_fusion_data', StartBaggingFusionData)
resp1 = start_bagging_fusion_data()
# return resp1.data_filepath
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def stop_bagging(self):
print "Waiting for 'stop_bagging_fusion_data' service..."
rospy.wait_for_service('stop_bagging_fusion_data')
print "Found it!, stopping bagging..."
try:
stop_bagging_fusion_data = rospy.ServiceProxy('stop_bagging_fusion_data', StopBaggingFusionData)
resp1 = stop_bagging_fusion_data()
return resp1.status
except rospy.ServiceException, e:
print "Service call failed: %s" % e
def testInThread(self):
"""
DEPRECATED
Runs the grasping pipeline
1. Move the robot to collect sensor data
2. Request the grasp (via a Ros Action)
3. Move Home
4. Wait for the response from SpartanGrasp
5. Process the result
"""
self.collectSensorData()
self.moveHome()
self.requestGrasp()
result = self.waitForGenerateGraspsResult()
graspFound = self.processGenerateGraspsResult(result)
return graspFound
def testMoveHome(self):
self.taskRunner.callOnThread(self.moveHome)
def test(self):
self.taskRunner.callOnThread(self.testInThread)
def test_grasp_3D_location(self):
"""
Calls grasp_3D_location in a thread
:return:
"""
self.taskRunner.callOnThread(self.grasp_3D_location)
def testAttemptGrasp(self):
self.taskRunner.callOnThread(self.attemptGrasp, self.graspFrame)
def testPickupObject(self):
self.taskRunner.callOnThread(self.pickupObject)
def test_pickup_object(self):
self.taskRunner.callOnThread(self.pickup_object)
def testGraspAndStowObject(self):
self.taskRunner.callOnThread(self.graspAndStowObject)
def testPipeline(self):
self.taskRunner.callOnThread(self.planGraspAndPickupObject)
def testCollectSensorData(self, **kwargs):
self.taskRunner.callOnThread(self.collectSensorData, **kwargs)
def testRequestGrasp(self):
self.taskRunner.callOnThread(self.requestGrasp)
def testInteractionLoop(self, num_interactions=3):
self.taskRunner.callOnThread(self.interactAndCollectFusionDataLoop, num_interactions)
def test_on_clicked_point(self):
self.taskRunner.callOnThread(self.on_clicked_point)
def testFindBestMatch(self):
self.taskRunner.callOnThread(self.findBestBatch)
def test_grasp_best_match(self):
self.taskRunner.callOnThread(self.grasp_best_match)
def test_find_best_match_and_grasp_and_stow(self):
self.taskRunner.callOnThread(self.find_best_match_and_grasp_and_stow)
def test_best_match_no_data(self):
self.taskRunner.callOnThread(self.request_best_match)
def test_reorient(self):
self.taskRunner.callOnThread(self.pickup_object_and_reorient_on_table)
def test_interact_with_object(self):
self.taskRunner.callOnThread(self.interact_with_object)
def test_start_bagging(self):
self.taskRunner.callOnThread(self.start_bagging)
def test_stop_bagging(self):
self.taskRunner.callOnThread(self.stop_bagging)
def test_execute_grasp(self):
self.taskRunner.callOnThread(self.execute_grasp)
def test_request_spartan_grasp(self, *args, **kwargs):
"""
Collect sensor data and send request to spartan_grasp
Visualize resulting grasp
:return:
"""
self.taskRunner.callOnThread(self.request_spartan_grasp, *args, **kwargs)
def test_run_poser(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_poser, *args, **kwargs)
def test_run_manipulate_object(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_manipulate_object, *args, **kwargs)
def test_run_category_manipulation_goal_estimation(self,*args, **kwargs):
self.taskRunner.callOnThread(self.run_category_manipulation_goal_estimation, *args, **kwargs)
def test_run_category_manipulation_pipeline(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_category_manipulation_pipeline, *args, **kwargs)
def test_run_keypoint_detection(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_keypoint_detection, *args, **kwargs)
def test_run_mug_on_rack_manipulation(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_mug_on_rack_manipulation, *args, **kwargs)
def test_retract_from_rack(self, *args, **kwargs):
self.taskRunner.callOnThread(self.retract_from_mug_rack, *args, **kwargs)
def test_retract_from_mug_shelf(self, *args, **kwargs):
self.taskRunner.callOnThread(self.retract_from_mug_shelf, *args, **kwargs)
def test_run_mug_shelf_manipulation(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_mug_shelf_manipulation, *args, **kwargs)
def test_run_shoe_manipulation(self, *args, **kwargs):
self.taskRunner.callOnThread(self.run_shoe_rack_manipulation, *args, **kwargs)
def loadDefaultPointCloud(self):
self.pointCloudListMsg = GraspSupervisor.getDefaultPointCloudListMsg()
def test_dev(self):
def thread_fun():
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=True)
speed = self.graspingParams['speed']['fast']
self.moveHome(speed=speed)
self.run_category_manipulation_goal_estimation()
self.taskRunner.callOnThread(thread_fun)
def test_mug_shelf_3D_pipeline(self):
self.taskRunner.callOnThread(self.run_mug_shelf_3D_pipeline)
def test_mug_rack_pipeline(self, *args, **kwargs):
# time.sleep(10.0) # sleep for 10 seconds
self.taskRunner.callOnThread(self.run_mug_on_rack_pipeline, *args, **kwargs)
def test_shoe_rack_pipeline(self):
self.taskRunner.callOnThread(self.run_shoe_on_rack_pipeline)
def test_category_manip_pipeline(self):
"""
Runs the appropriate category manip pipeline
:return:
:rtype:
"""
raise NotImplementedError("")
def test_estimate_mug_rack_pose(self):
self.taskRunner.callOnThread(self.estimate_mug_rack_pose)
def r(self):
self.test_retract_from_rack()
@staticmethod
def rectangleMessageFromYamlNode(node):
msg = spartan_grasp_msgs.msg.Rectangle()
msg.min_pt = rosUtils.listToPointMsg(node['min_pt'])
msg.max_pt = rosUtils.listToPointMsg(node['max_pt'])
msg.pose = rosUtils.ROSPoseMsgFromPose(node)
return msg
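# A plausible shape for the YAML node consumed above (values are made up;
# min_pt/max_pt must be 3-element lists per rosUtils.listToPointMsg, and the
# node must also carry whatever pose fields rosUtils.ROSPoseMsgFromPose expects):
#
#   grasp_volume:
#     min_pt: [0.4, -0.2, 0.0]
#     max_pt: [0.8, 0.2, 0.3]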
@staticmethod
def makeDefault(**kwargs):
graspingParamsFile = os.path.join(spartanUtils.getSpartanSourceDir(), 'src', 'catkin_projects',
'station_config', 'RLG_iiwa_1', 'manipulation', 'params.yaml')
return GraspSupervisor(graspingParamsFile=graspingParamsFile, **kwargs)
@staticmethod
def getPointCloudListMsg(rosBagFilename):
bag = rosbag.Bag(rosBagFilename)
pointCloudListMsg = None
for topic, msg, t in bag.read_messages(topics=['data']):
pointCloudListMsg = msg
bag.close()
return pointCloudListMsg
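# Example (hypothetical bag path): returns the last message on the 'data'
# topic, i.e. the format written by saveSensorDataToBagFile above.
#
#   msg = GraspSupervisor.getPointCloudListMsg('/tmp/grasp_sensor_data.bag')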
@staticmethod
def getDefaultPointCloudListMsg():
spartanSourceDir = spartanUtils.getSpartanSourceDir()
# filename = "grasp_sensor_data.bag"
filename = "sr300_box.bag"
rosBagFilename = os.path.join(spartanSourceDir, 'data', 'rosbag', 'iiwa', filename)
return GraspSupervisor.getPointCloudListMsg(rosBagFilename)
| 2 | bd179fda18551d4f3d8a4d695a9da38ee607ef1d | Python |
import datetime
import json
from dateutil import parser
import mock
from python_http_client.exceptions import ForbiddenError
from rdr_service import clock, config
from rdr_service.api_util import open_cloud_file
from rdr_service.clock import FakeClock
from rdr_service.dao.database_utils import format_datetime
from rdr_service.dao.genomics_dao import GenomicGcDataFileDao, GenomicGCValidationMetricsDao, GenomicIncidentDao, \
GenomicSetMemberDao, UserEventMetricsDao, GenomicJobRunDao, GenomicResultWithdrawalsDao, \
GenomicMemberReportStateDao, GenomicAppointmentEventMetricsDao, GenomicAppointmentEventDao, GenomicResultViewedDao, \
GenomicInformingLoopDao, GenomicAppointmentEventNotifiedDao, GenomicDefaultBaseDao
from rdr_service.dao.message_broker_dao import MessageBrokenEventDataDao
from rdr_service.genomic_enums import GenomicIncidentCode, GenomicJob, GenomicWorkflowState, GenomicSubProcessResult, \
GenomicSubProcessStatus, GenomicManifestTypes, GenomicQcStatus, GenomicReportState
from rdr_service.genomic.genomic_job_components import GenomicFileIngester
from rdr_service.genomic.genomic_job_controller import GenomicJobController
from rdr_service.model.genomics import GenomicGcDataFile, GenomicIncident, GenomicSetMember, GenomicGCValidationMetrics,\
GenomicGCROutreachEscalationNotified
from rdr_service.offline.genomics import genomic_pipeline, genomic_cvl_pipeline
from rdr_service.participant_enums import WithdrawalStatus
from tests import test_data
from tests.genomics_tests.test_genomic_utils import create_ingestion_test_file
from tests.helpers.unittest_base import BaseTestCase
class GenomicJobControllerTest(BaseTestCase):
def setUp(self):
super(GenomicJobControllerTest, self).setUp()
self.data_file_dao = GenomicGcDataFileDao()
self.event_data_dao = MessageBrokenEventDataDao()
self.incident_dao = GenomicIncidentDao()
self.member_dao = GenomicSetMemberDao()
self.metrics_dao = GenomicGCValidationMetricsDao()
self.user_event_metrics_dao = UserEventMetricsDao()
self.job_run_dao = GenomicJobRunDao()
self.report_state_dao = GenomicMemberReportStateDao()
self.appointment_event_dao = GenomicAppointmentEventDao()
self.appointment_metrics_dao = GenomicAppointmentEventMetricsDao()
def test_incident_with_long_message(self):
"""Make sure the length of incident messages doesn't cause issues when recording them"""
incident_message = "1" * (GenomicIncident.message.type.length + 20)
mock_slack_handler = mock.MagicMock()
job_controller = GenomicJobController(job_id=1)
job_controller.genomic_alert_slack = mock_slack_handler
job_controller.create_incident(message=incident_message, slack=True)
# Double check that the incident was saved successfully, with part of the message
incident: GenomicIncident = self.session.query(GenomicIncident).one()
self.assertTrue(incident_message.startswith(incident.message))
# Make sure Slack received the full message
mock_slack_handler.send_message_to_webhook.assert_called_with(
message_data={
'text': incident_message
}
)
def test_gvcf_files_ingestion(self):
job_controller = GenomicJobController(job_id=38)
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
file_path_md5 = "Wgs_sample_raw_data/SS_VCF_research/" \
"BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz.md5sum"
full_path = f'{bucket_name}/{file_path}'
full_path_md5 = f'{bucket_name}/{file_path_md5}'
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
job_controller.ingest_data_files_into_gc_metrics(file_path_md5, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfMd5Path)
self.assertEqual(metrics.gvcfMd5Path, full_path_md5)
job_controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfPath)
self.assertEqual(metrics.gvcfPath, full_path)
def test_gvcf_files_ingestion_create_incident(self):
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="111111111",
sampleId="222222222222",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName=bucket_name,
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
with GenomicJobController(GenomicJob.INGEST_DATA_FILES) as controller:
controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
incident = self.incident_dao.get(1)
self.assertIsNotNone(incident)
self.assertEqual(incident.code, GenomicIncidentCode.UNABLE_TO_FIND_METRIC.name)
self.assertEqual(incident.data_file_path, file_path)
self.assertEqual(incident.message, 'INGEST_DATA_FILES: Cannot find '
'genomics metric record for sample id: '
'21042005280')
def test_accession_data_files(self):
test_bucket_baylor = "fake-data-bucket-baylor"
test_idat_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01_Grn.idat"
test_vcf_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01.vcf.gz"
test_cram_file = "fake-data-bucket-baylor/Wgs_sample_raw_data/" \
"CRAMs_CRAIs/BCM_A100134256_21063006771_SIA0017196_1.cram"
test_files = [test_idat_file, test_vcf_file, test_cram_file]
test_time = datetime.datetime(2021, 7, 9, 14, 1, 1)
# run job controller method on each file
with clock.FakeClock(test_time):
for file_path in test_files:
with GenomicJobController(GenomicJob.ACCESSION_DATA_FILES) as controller:
controller.accession_data_files(file_path, test_bucket_baylor)
inserted_files = self.data_file_dao.get_all()
# idat
expected_idat = GenomicGcDataFile(
id=1,
created=test_time,
modified=test_time,
file_path=test_idat_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01_Grn.idat',
file_type='Grn.idat',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# vcf
expected_vcf = GenomicGcDataFile(
id=2,
created=test_time,
modified=test_time,
file_path=test_vcf_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01.vcf.gz',
file_type='vcf.gz',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# cram
expected_cram = GenomicGcDataFile(
id=3,
created=test_time,
modified=test_time,
file_path=test_cram_file,
gc_site_id='bcm',
bucket_name='fake-data-bucket-baylor',
file_prefix='Wgs_sample_raw_data/CRAMs_CRAIs',
file_name='BCM_A100134256_21063006771_SIA0017196_1.cram',
file_type='cram',
identifier_type='sample_id',
identifier_value='21063006771',
ignore_flag=0,
)
# obj mapping
expected_objs = {
0: expected_idat,
1: expected_vcf,
2: expected_cram
}
# verify test objects match expectations
for i in range(3):
self.assertEqual(expected_objs[i].bucket_name, inserted_files[i].bucket_name)
self.assertEqual(expected_objs[i].created, inserted_files[i].created)
self.assertEqual(expected_objs[i].file_name, inserted_files[i].file_name)
self.assertEqual(expected_objs[i].file_path, inserted_files[i].file_path)
self.assertEqual(expected_objs[i].file_prefix, inserted_files[i].file_prefix)
self.assertEqual(expected_objs[i].file_type, inserted_files[i].file_type)
self.assertEqual(expected_objs[i].gc_site_id, inserted_files[i].gc_site_id)
self.assertEqual(expected_objs[i].id, inserted_files[i].id)
self.assertEqual(expected_objs[i].identifier_type, inserted_files[i].identifier_type)
self.assertEqual(expected_objs[i].identifier_value, inserted_files[i].identifier_value)
self.assertEqual(expected_objs[i].ignore_flag, inserted_files[i].ignore_flag)
self.assertEqual(expected_objs[i].metadata, inserted_files[i].metadata)
self.assertEqual(expected_objs[i].modified, inserted_files[i].modified)
def test_updating_members_blocklists(self):
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
ids_should_be_updated = []
# just-created members whose workflow state MATCHES the blocklist criteria
for i in range(4):
ids_should_be_updated.append(
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='test_investigation_one' if i & 2 != 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='Y' if i & 2 == 0 else 'N'
).id
)
# just-created members whose workflow state DOES NOT MATCH the blocklist criteria
for i in range(2):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='aou_array',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
# current config json in base_config.json
created_members = self.member_dao.get_all()
blocklisted = list(filter(lambda x: x.blockResults == 1 or x.blockResearch == 1, created_members))
self.assertEqual(sorted(ids_should_be_updated), sorted(obj.id for obj in blocklisted))
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should NOT be RESEARCH/RESULTS blocked
self.assertTrue(all(
obj.blockResearch == 0 and obj.blockResearchReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# clear current set member records
with self.member_dao.session() as session:
session.query(GenomicSetMember).delete()
run_result = self.job_run_dao.get(1)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
# modified members whose workflow state MATCHES the blocklist criteria
for i in range(4):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='test_investigation_one' if i & 2 != 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW1,
ai_an='Y' if i & 2 == 0 else 'N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
modified_members = self.member_dao.get_all()
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
run_result = self.job_run_dao.get(2)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
def test_ingest_user_metrics_file(self):
test_file = 'Genomic-Metrics-File-User-Events-Test.csv'
bucket_name = 'test_bucket'
sub_folder = 'user_events'
pids = []
file_ingester = GenomicFileIngester()
for _ in range(2):
pid = self.data_generator.create_database_participant()
pids.append(pid.participantId)
test_metrics_file = create_ingestion_test_file(
test_file,
bucket_name,
sub_folder)
test_file_path = f'{bucket_name}/{sub_folder}/{test_metrics_file}'
with open_cloud_file(test_file_path) as csv_file:
metrics_to_ingest = file_ingester._read_data_to_ingest(csv_file)
with GenomicJobController(GenomicJob.METRICS_FILE_INGEST) as controller:
controller.ingest_metrics_file(
metric_type='user_events',
file_path=test_file_path,
)
job_run_id = controller.job_run.id
metrics = self.user_event_metrics_dao.get_all()
for pid in pids:
file_metrics = list(filter(lambda x: int(x['participant_id'].split('P')[-1]) == pid, metrics_to_ingest[
'rows']))
participant_ingested_metrics = list(filter(lambda x: x.participant_id == pid, metrics))
self.assertEqual(len(file_metrics), len(participant_ingested_metrics))
self.assertTrue(all(obj.run_id == job_run_id for obj in participant_ingested_metrics))
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_reconcile_pdr_data(self, mock_cloud_task):
# init new job run in __enter__
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
cloud_task_endpoint = 'rebuild_genomic_table_records_task'
first_run = self.job_run_dao.get_all()
self.assertEqual(mock_cloud_task.call_count, 1)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 1)
self.assertEqual(call_args[0].args[0]['table'], self.job_run_dao.model_type.__tablename__)
self.assertTrue(type(call_args[0].args[0]['ids']) is list)
self.assertEqual(call_args[0].args[0]['ids'], [obj.id for obj in first_run])
self.assertEqual(call_args[0].args[1], cloud_task_endpoint)
participant = self.data_generator.create_database_participant()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
plus_ten = clock.CLOCK.now() + datetime.timedelta(minutes=10)
plus_ten = plus_ten.replace(microsecond=0)
with FakeClock(plus_ten):
for i in range(2):
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=first_run[0].id,
startTime=clock.CLOCK.now(),
filePath=f'test_file_path_{i}',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
manifest = self.data_generator.create_database_genomic_manifest_file(
manifestTypeId=2,
filePath=f'test_file_path_{i}'
)
self.data_generator.create_database_genomic_manifest_feedback(
inputManifestFileId=manifest.id,
feedbackRecordCount=2
)
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=participant.participantId,
event_name='test_event',
run_id=1,
)
self.data_generator.create_database_genomic_informing_loop(
message_record_id=1,
event_type='informing_loop_decision',
module_type='gem',
participant_id=participant.participantId,
decision_value='maybe_later',
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_database_genomic_cvl_past_due(
cvl_site_id='co',
email_notification_sent=0,
sample_id='sample_test',
results_type='hdr',
genomic_set_member_id=gen_member.id
)
self.data_generator.create_database_genomic_appointment(
message_record_id=i,
appointment_id=i,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=participant.participantId,
event_authored_time=clock.CLOCK.now(),
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_member_report_state(
genomic_set_member_id=gen_member.id,
participant_id=participant.participantId,
module='gem',
genomic_report_state=GenomicReportState.GEM_RPT_READY,
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_genomic_result_viewed(
participant_id=participant.participantId,
event_type='result_viewed',
event_authored_time=clock.CLOCK.now(),
module_type='gem',
sample_id=gen_member.sampleId
)
# gets new records that were created with last job run from above
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
affected_tables = [
'genomic_set',
'genomic_set_member',
'genomic_job_run',
'genomic_file_processed',
'genomic_gc_validation_metrics',
'genomic_manifest_file',
'genomic_manifest_feedback',
'genomic_informing_loop',
'genomic_cvl_results_past_due',
'user_event_metrics',
'genomic_member_report_state',
'genomic_result_viewed',
'genomic_appointment_event'
]
num_calls = len(affected_tables) + 1
self.assertEqual(mock_cloud_task.call_count, num_calls)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), num_calls)
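# each call_args entry is an (args, kwargs) tuple, so obj[0][0] is the first
# positional argument (the payload dict) and obj[0][1] is the second (the
# cloud task endpoint name)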
mock_tables = set([obj[0][0]['table'] for obj in call_args])
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertEqual(sorted(mock_tables), sorted(affected_tables))
self.assertTrue(all(obj == cloud_task_endpoint for obj in mock_endpoint))
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_retry_manifest_ingestions_if_deltas(self, mock_cloud_task):
bucket_name = "test-bucket"
aw1_file_name = "AW1_wgs_sample_manifests/RDR_AoU_SEQ_PKG-2104-026571.csv"
aw1_manifest_path = f"{bucket_name}/{aw1_file_name}"
aw2_file_name = "AW2_wgs_data_manifests/RDR_AoU_SEQ_DataManifest_04092021.csv"
aw2_manifest_path = f"{bucket_name}/{aw2_file_name}"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
# Create AW1 job_run
aw1_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# Create AW2 job_run
aw2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_INGESTION,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# should have no data
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(3)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
self.assertFalse(mock_cloud_task.call_count)
# Create genomic_aw1_raw record
self.data_generator.create_database_genomic_aw1_raw(
file_path=aw1_manifest_path,
package_id="PKG-2104-026571",
biobank_id="A10001",
)
# Create genomic_aw2_raw record
self.data_generator.create_database_genomic_aw2_raw(
file_path=aw2_manifest_path,
biobank_id="A10001",
sample_id="100001",
biobankidsampleid="A10001_100001",
)
# Create AW1 genomic_manifest_file record
aw1_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW1,
filePath=aw1_manifest_path,
fileName=aw1_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW2 genomic_manifest_file record
aw2_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW2,
filePath=aw2_manifest_path,
fileName=aw2_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW1 file_processed
aw1_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw1_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw1_manifest_file.id,
filePath=f"/{aw1_manifest_path}",
bucketName=bucket_name,
fileName=aw1_file_name,
)
# Create AW2 file_processed
aw2_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw2_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw2_manifest_file.id,
filePath=f"/{aw2_manifest_path}",
bucketName=bucket_name,
fileName=aw2_file_name,
)
# genomic_set_member for AW1
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1,
aw1FileProcessedId=aw1_file_processed.id
)
# genomic_gc_validation_metrics for AW1
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=aw2_file_processed.id
)
# one AW1/AW2 with no deltas
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(4)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
self.assertFalse(mock_cloud_task.call_count)
# empty tables resulting in deltas and cloud task calls
with self.member_dao.session() as session:
session.query(GenomicGCValidationMetrics).delete()
session.query(GenomicSetMember).delete()
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(5)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.SUCCESS)
# one AW1/AW2 with deltas
self.assertEqual(mock_cloud_task.call_count, 2)
self.assertTrue(mock_cloud_task.call_count)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 2)
cloud_task_endpoint = ['ingest_aw1_manifest_task', 'ingest_aw2_manifest_task']
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertTrue(all(obj in cloud_task_endpoint for obj in mock_endpoint))
mock_buckets = set([obj[0][0]['bucket_name'] for obj in call_args])
self.assertEqual(len(mock_buckets), 1)
self.assertTrue(list(mock_buckets)[0] == bucket_name)
def test_calculate_informing_loop_ready_flags(self):
num_participants = 4
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
for num in range(num_participants):
plus_num = clock.CLOCK.now() + datetime.timedelta(minutes=num)
plus_num = plus_num.replace(microsecond=0)
with FakeClock(plus_num):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
stored_sample = self.data_generator.create_database_biobank_stored_sample(
biobankId=summary.biobankId,
biobankOrderIdentifier=self.fake.pyint()
)
collection_site = self.data_generator.create_database_site(
siteType='Clinic'
)
order = self.data_generator.create_database_biobank_order(
collectedSiteId=collection_site.siteId,
participantId=summary.participantId,
finalizedTime=plus_num
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="1",
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="2",
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
qcStatus=GenomicQcStatus.PASS,
gcManifestSampleSource='Whole Blood',
collectionTubeId=stored_sample.biobankStoredSampleId
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=member.id,
sexConcordance='True',
drcFpConcordance='Pass',
drcSexConcordance='Pass',
processingStatus='Pass'
)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 0 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is None for obj in current_set_members))
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
# no config object, controller method should return
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
calculation_limit = 2
config.override_setting(config.CALCULATE_READY_FLAG_LIMIT, [calculation_limit])
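# with a limit of 2 per run, the 4 members need two runs to all be flagged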
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(any(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(any(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
current_loops_set = [obj for obj in current_set_members if obj.informingLoopReadyFlag == 1
and obj.informingLoopReadyFlagModified is not None]
self.assertEqual(len(current_loops_set), calculation_limit)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants // 2)
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), 0)
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_getting_results_withdrawn(self, email_mock):
num_participants = 4
result_withdrawal_dao = GenomicResultWithdrawalsDao()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids = []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY,
gemA1ManifestJobRunId=gen_job_run.id if num % 2 == 0 else None
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
cvlW1ilHdrJobRunId=gen_job_run.id
)
pids.append(summary.participantId)
config.override_setting(config.RDR_GENOMICS_NOTIFICATION_EMAIL, '[email protected]')
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# mock checks should be two => 1 GEM 1 HEALTH
self.assertEqual(email_mock.call_count, 2)
call_args = email_mock.call_args_list
self.assertTrue(any('GEM' in call.args[0].subject for call in call_args))
self.assertTrue(any('HEALTH' in call.args[0].subject for call in call_args))
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
all_withdrawal_records = result_withdrawal_dao.get_all()
self.assertTrue(len(all_withdrawal_records) == len(pids))
self.assertTrue(all(obj.participant_id in pids for obj in all_withdrawal_records))
array_results = list(filter(lambda x: x.array_results == 1, all_withdrawal_records))
# should only be 2
self.assertEqual(len(array_results), 2)
cvl_results = list(filter(lambda x: x.cvl_results == 1, all_withdrawal_records))
# should be 4 for num of participants
self.assertEqual(len(cvl_results), num_participants)
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# mock checks should still be two on account of no records
self.assertEqual(email_mock.call_count, 2)
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
def test_gem_results_to_report_state(self):
num_participants = 8
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gem_a2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.GEM_A2_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids_to_update, member_ids = [], []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY
)
if num % 2 == 0:
member_ids.append(member.id)
pids_to_update.append(summary.participantId)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 2)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
current_members = self.member_dao.get_all()
# the 4 correctly updated members should be returned
for member in current_members:
if member.participantId in pids_to_update:
member.gemA2ManifestJobRunId = gem_a2_job_run.id
member.genomicWorkflowState = GenomicWorkflowState.GEM_RPT_READY
self.member_dao.update(member)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 3)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
current_gem_report_states = self.report_state_dao.get_all()
self.assertEqual(len(current_gem_report_states), len(pids_to_update))
self.assertTrue(all(obj.event_type == 'result_ready' for obj in current_gem_report_states))
self.assertTrue(all(obj.event_authored_time is not None for obj in current_gem_report_states))
self.assertTrue(all(obj.module == 'gem' for obj in current_gem_report_states))
self.assertTrue(
all(obj.genomic_report_state == GenomicReportState.GEM_RPT_READY for obj in current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_report_state_str == GenomicReportState.GEM_RPT_READY.name for obj in
current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_set_member_id in member_ids for obj in
current_gem_report_states)
)
# the 4 members already inserted should not be returned
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 4)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[2]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
self.clear_table_after_test('genomic_member_report_state')
def test_reconcile_informing_loop(self):
event_dao = UserEventMetricsDao()
event_dao.truncate() # for test suite
il_dao = GenomicInformingLoopDao()
for pid in range(8):
self.data_generator.create_database_participant(participantId=1 + pid, biobankId=1 + pid)
# Set up initial job run ID
self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_FILE_INGEST,
startTime=clock.CLOCK.now()
)
# create genomic set
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
# insert set members
for b in ["aou_array", "aou_wgs"]:
for i in range(1, 9):
self.data_generator.create_database_genomic_set_member(
participantId=i,
genomicSetId=1,
biobankId=i,
collectionTubeId=100 + i,
sampleId=10 + i,
genomeType=b,
)
# Set up ingested metrics data
events = ['gem.informing_loop.started',
'gem.informing_loop.screen8_no',
'gem.informing_loop.screen8_yes',
'hdr.informing_loop.started',
'gem.informing_loop.screen3',
'pgx.informing_loop.screen8_no',
'hdr.informing_loop.screen10_no']
for p in range(4):
for i in range(len(events)):
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=p + 1,
created_at=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i),
event_name=events[i],
run_id=1,
ignore_flag=0,
)
# Set up informing loop from message broker records
decisions = [None, 'no', 'yes']
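# only decisions[0] (loop started, no decision yet) and decisions[1] ('no')
# are inserted below; the reconcile job is expected to upgrade the latest
# gem decision to 'yes' from the ingested user event metrics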
for p in range(3):
for i in range(2):
self.data_generator.create_database_genomic_informing_loop(
message_record_id=i,
event_type='informing_loop_started' if i == 0 else 'informing_loop_decision',
module_type='gem',
participant_id=p + 1,
decision_value=decisions[i],
sample_id=100 + p,
event_authored_time=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i)
)
# Test for no message but yes user event
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=6,
created_at=datetime.datetime(2021, 12, 29, 00),
event_name='gem.informing_loop.screen8_yes',
run_id=1,
ignore_flag=0,
)
# Run reconcile job
genomic_pipeline.reconcile_informing_loop_responses()
# Test mismatched GEM data ingested correctly
pid_list = [1, 2, 3, 6]
new_il_values = il_dao.get_latest_il_for_pids(
pid_list=pid_list,
module="gem"
)
for value in new_il_values:
self.assertEqual("yes", value.decision_value)
pid_list = [1, 2, 3, 4]
for module in ["hdr", "pgx"]:
new_il_values = il_dao.get_latest_il_for_pids(
pid_list=pid_list,
module=module
)
for value in new_il_values:
self.assertEqual("no", value.decision_value)
self.assertIsNotNone(value.created_from_metric_id)
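# Reconcile *.result_ready user event metrics into member report states
# for the PGx and HDR modules.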
def test_reconcile_message_broker_results_ready(self):
# Create Test Participants' data
# create genomic set
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
# Set up initial job run ID
self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_FILE_INGEST,
startTime=clock.CLOCK.now()
)
for pid in range(7):
self.data_generator.create_database_participant(participantId=1 + pid, biobankId=1 + pid)
# insert set members and event metrics records
for i in range(1, 6):
self.data_generator.create_database_genomic_set_member(
participantId=i,
genomicSetId=1,
biobankId=i,
collectionTubeId=100 + i,
sampleId=10 + i,
genomeType="aou_wgs",
)
# 3 PGX records
if i < 4:
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=i,
created_at=datetime.datetime(2022, 10, 6, 00),
event_name="pgx.result_ready",
run_id=1,
)
# 1 HDR Positive
if i == 4:
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=i,
created_at=datetime.datetime(2022, 10, 6, 00),
event_name="hdr.result_ready.informative",
run_id=1,
)
# 1 HDR uninformative
if i == 5:
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=i,
created_at=datetime.datetime(2022, 10, 6, 00),
event_name="hdr.result_ready.uninformative",
run_id=1,
)
# Run job
genomic_cvl_pipeline.reconcile_message_broker_results_ready()
# Test correct data inserted
report_state_dao = GenomicMemberReportStateDao()
states = report_state_dao.get_all()
self.assertEqual(5, len(states))
pgx_records = [rec for rec in states if rec.module == "pgx_v1"]
hdr_record_uninf = [rec for rec in states
if rec.genomic_report_state == GenomicReportState.HDR_RPT_UNINFORMATIVE][0]
hdr_record_pos = [rec for rec in states
if rec.genomic_report_state == GenomicReportState.HDR_RPT_POSITIVE][0]
for pgx_record in pgx_records:
self.assertEqual(GenomicReportState.PGX_RPT_READY, pgx_record.genomic_report_state)
self.assertEqual("PGX_RPT_READY", pgx_record.genomic_report_state_str)
self.assertEqual(int(pgx_record.sample_id), pgx_record.participant_id + 10)
self.assertEqual("result_ready", pgx_record.event_type)
self.assertEqual(datetime.datetime(2022, 10, 6, 00), pgx_record.event_authored_time)
self.assertIsNotNone(pgx_record.created_from_metric_id)
self.assertEqual("HDR_RPT_UNINFORMATIVE", hdr_record_uninf.genomic_report_state_str)
self.assertEqual(int(hdr_record_uninf.sample_id), hdr_record_uninf.participant_id + 10)
self.assertEqual("result_ready", hdr_record_uninf.event_type)
self.assertEqual(datetime.datetime(2022, 10, 6, 00), hdr_record_uninf.event_authored_time)
self.assertIsNotNone(hdr_record_uninf.created_from_metric_id)
self.assertEqual("
| 0 |
bd179fda18551d4f3d8a4d695a9da38ee607ef1d
|
Python
|
HDR_RPT_POSITIVE", hdr_record_pos.genomic_report_state_str)
self.assertEqual(int(hdr_record_pos.sample_id), hdr_record_pos.participant_id + 10)
self.assertEqual("result_ready", hdr_record_pos.event_type)
self.assertEqual(datetime.datetime(2022, 10, 6, 00), hdr_record_pos.event_authored_time)
self.assertIsNotNone(hdr_record_pos.created_from_metric_id)
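# Reconcile *.opened_at user event metrics into genomic_result_viewed records.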
def test_reconcile_message_broker_results_viewed(self):
# Create Test Participants' data
# create genomic set
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
# Set up initial job run ID
self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_FILE_INGEST,
startTime=clock.CLOCK.now()
)
for pid in range(3):
self.data_generator.create_database_participant(participantId=1 + pid, biobankId=1 + pid)
# insert set members and event metrics records
for i in range(1, 3):
self.data_generator.create_database_genomic_set_member(
participantId=i,
genomicSetId=1,
biobankId=i,
collectionTubeId=100 + i,
sampleId=10 + i,
genomeType="aou_wgs",
)
# 1 PGX Viewed
if i == 1:
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=i,
created_at=datetime.datetime(2022, 10, 6, 00),
event_name="pgx.opened_at",
run_id=1,
)
# 1 HDR Viewed
if i == 2:
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=i,
created_at=datetime.datetime(2022, 10, 6, 00),
event_name="hdr.opened_at",
run_id=1,
)
genomic_cvl_pipeline.reconcile_message_broker_results_viewed()
# Test correct data inserted
result_viewed_dao = GenomicResultViewedDao()
results = result_viewed_dao.get_all()
self.assertEqual(2, len(results))
for record in results:
if record.participant_id == 1:
self.assertEqual("pgx_v1", record.module_type)
else:
self.assertEqual("hdr_v1", record.module_type)
self.assertEqual(int(record.sample_id), record.participant_id + 10)
self.assertEqual("result_viewed", record.event_type)
self.assertEqual(datetime.datetime(2022, 10, 6, 00), record.first_viewed)
self.assertIsNotNone(record.created_from_metric_id)
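# Ingest an appointment-events metrics JSON file from the cloud bucket
# into genomic_appointment_event_metrics.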
def test_ingest_appointment_metrics_file(self):
test_file = 'Genomic-Metrics-File-Appointment-Events-Test.json'
bucket_name = 'test_bucket'
sub_folder = 'appointment_events'
pids = []
for _ in range(4):
summary = self.data_generator.create_database_participant_summary()
pids.append(summary.participantId)
test_file_path = f'{bucket_name}/{sub_folder}/{test_file}'
appointment_data = test_data.load_test_data_json(
"Genomic-Metrics-File-Appointment-Events-Test.json")
appointment_data_str = json.dumps(appointment_data, indent=4)
with open_cloud_file(test_file_path, mode='wb') as cloud_file:
cloud_file.write(appointment_data_str.encode("utf-8"))
with GenomicJobController(GenomicJob.APPOINTMENT_METRICS_FILE_INGEST) as controller:
controller.ingest_appointment_metrics_file(
file_path=test_file_path,
)
all_metrics = self.appointment_metrics_dao.get_all()
# should be 5 metric records for what's in the json file
self.assertEqual(len(all_metrics), 5)
self.assertTrue(all((obj.participant_id in pids for obj in all_metrics)))
self.assertTrue(all((obj.file_path == test_file_path for obj in all_metrics)))
self.assertTrue(all((obj.appointment_event is not None for obj in all_metrics)))
self.assertTrue(all((obj.created is not None for obj in all_metrics)))
self.assertTrue(all((obj.modified is not None for obj in all_metrics)))
self.assertTrue(all((obj.module_type is not None for obj in all_metrics)))
self.assertTrue(all((obj.event_authored_time is not None for obj in all_metrics)))
self.assertTrue(all((obj.event_type is not None for obj in all_metrics)))
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 1)
current_job_run = current_job_runs[0]
self.assertTrue(current_job_run.jobId == GenomicJob.APPOINTMENT_METRICS_FILE_INGEST)
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
self.clear_table_after_test('genomic_appointment_event_metrics')
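# Backfill appointment events that are missing from message broker data
# using the previously ingested appointment metrics.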
def test_reconcile_appointments_with_metrics(self):
fake_date = parser.parse('2020-05-29T08:00:01-05:00')
for num in range(4):
summary = self.data_generator.create_database_participant_summary()
missing_json = {
"event": "appointment_updated",
"eventAuthoredTime": "2022-09-16T17:18:38Z",
"participantId": f'P{summary.participantId}',
"messageBody": {
"module_type": "hdr",
"appointment_timestamp": "2022-09-19T19:30:00+00:00",
"id": 55,
"appointment_timezone": "America/Los_Angeles",
"location": "CA",
"contact_number": "18043704252",
"language": "en",
"source": "Color"
}
}
if num % 2 == 0:
self.data_generator.create_database_genomic_appointment(
message_record_id=num,
appointment_id=num,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=summary.participantId,
event_authored_time=fake_date,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_appointment_metric(
participant_id=summary.participantId,
appointment_event=json.dumps(missing_json, indent=4) if num % 2 != 0 else 'foo',
file_path='test_file_path',
module_type='hdr',
event_authored_time=fake_date,
event_type='appointment_updated' if num % 2 != 0 else 'appointment_scheduled'
)
current_events = self.appointment_event_dao.get_all()
# should be 2 initial appointment events
self.assertEqual(len(current_events), 2)
current_metrics = self.appointment_metrics_dao.get_all()
# should be 4 initial appointment metrics
self.assertEqual(len(current_metrics), 4)
self.assertTrue(all(obj.reconcile_job_run_id is None for obj in current_metrics))
with GenomicJobController(GenomicJob.APPOINTMENT_METRICS_RECONCILE) as controller:
controller.reconcile_appointment_events_from_metrics()
job_run = self.job_run_dao.get_all()
self.assertEqual(len(job_run), 1)
self.assertTrue(job_run[0].jobId == GenomicJob.APPOINTMENT_METRICS_RECONCILE)
current_events = self.appointment_event_dao.get_all()
# should be 4 appointment events 2 initial + 2 added
self.assertEqual(len(current_events), 4)
scheduled = list(filter(lambda x: x.event_type == 'appointment_scheduled', current_events))
self.assertEqual(len(scheduled), 2)
self.assertTrue(all(obj.created_from_metric_id is None for obj in scheduled))
updated = list(filter(lambda x: x.event_type == 'appointment_updated', current_events))
self.assertEqual(len(updated), 2)
self.assertTrue(all(obj.created_from_metric_id is not None for obj in updated))
current_metrics = self.appointment_metrics_dao.get_all()
# should STILL be 4 initial appointment metrics
self.assertEqual(len(current_metrics), 4)
self.assertTrue(all(obj.reconcile_job_run_id is not None for obj in current_metrics))
self.assertTrue(all(obj.reconcile_job_run_id == job_run[0].id for obj in current_metrics))
self.clear_table_after_test('genomic_appointment_event_metrics')
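# Notify the Color PM email list when a participant's GROR consent changes
# after an appointment was scheduled.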
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_check_appointments_gror_changed(self, email_mock):
fake_date = parser.parse("2022-09-01T13:43:23")
notified_dao = GenomicAppointmentEventNotifiedDao()
config.override_setting(config.GENOMIC_COLOR_PM_EMAIL, ['[email protected]'])
num_participants = 4
for num in range(num_participants):
gror = num if num > 1 else 1
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=gror
)
self.data_generator.create_database_genomic_appointment(
message_record_id=num,
appointment_id=num,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=summary.participantId,
event_authored_time=fake_date,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
changed_ppts = self.appointment_event_dao.get_appointments_gror_changed()
self.assertEqual(2, len(changed_ppts))
with GenomicJobController(GenomicJob.CHECK_APPOINTMENT_GROR_CHANGED) as controller:
controller.check_appointments_gror_changed()
self.assertEqual(email_mock.call_count, 1)
notified_appointments = notified_dao.get_all()
self.assertEqual(2, len(notified_appointments))
# test that already-notified participants are not returned by the query
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=2
)
self.data_generator.create_database_genomic_appointment(
message_record_id=5,
appointment_id=5,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=summary.participantId,
event_authored_time=fake_date,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
changed_ppts = self.appointment_event_dao.get_appointments_gror_changed()
self.assertEqual(1, len(changed_ppts))
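# Escalate HDR-positive participants with no completed or upcoming
# appointment after 14 days via email to the GCR escalation list.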
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_check_gcr_14day_escalation(self, email_mock):
fake_date = parser.parse("2022-09-01T13:43:23")
fake_date2 = parser.parse("2022-09-02T14:14:00")
fake_date3 = parser.parse("2022-09-03T15:15:00")
config.override_setting(config.GENOMIC_GCR_ESCALATION_EMAILS, ['[email protected]'])
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
pids = []
for _ in range(6):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
set_member = self.data_generator.create_database_genomic_set_member(
participantId=summary.participantId,
genomicSetId=1,
biobankId=1001,
collectionTubeId=100,
sampleId=10,
genomeType="aou_wgs",
)
self.data_generator.create_database_genomic_member_report_state(
participant_id=summary.participantId,
genomic_report_state=GenomicReportState.HDR_RPT_POSITIVE,
genomic_set_member_id=set_member.id,
module='hdr_v1',
event_authored_time=fake_date
)
pids.append(summary.participantId)
# Appointment scheduled in future: don't notify
self.data_generator.create_database_genomic_appointment(
message_record_id=101,
appointment_id=102,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=pids[0],
event_authored_time=fake_date,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
# Appointment completed: don't notify
self.data_generator.create_database_genomic_appointment(
message_record_id=102,
appointment_id=103,
event_type='appointment_completed',
module_type='hdr',
participant_id=pids[1],
event_authored_time=fake_date,
source='Color',
appointment_timestamp=fake_date,
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
# Appointment scheduled then canceled: notify
self.data_generator.create_database_genomic_appointment(
message_record_id=103,
appointment_id=104,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=pids[2],
event_authored_time=fake_date2,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_appointment(
message_record_id=104,
appointment_id=104,
event_type='appointment_cancelled',
module_type='hdr',
participant_id=pids[2],
event_authored_time=fake_date3,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
notified_dao = GenomicDefaultBaseDao(model_type=GenomicGCROutreachEscalationNotified)
notified_dao.insert_bulk([{
'participant_id': pids[4],
'created': clock.CLOCK.now(),
'modified': clock.CLOCK.now(),
'message_sent': True
},{
'participant_id': pids[5],
'created': clock.CLOCK.now(),
'modified': clock.CLOCK.now(),
'message_sent': False
}])
with clock.FakeClock(parser.parse('2022-11-01T05:15:00')):
escalated_participants = self.report_state_dao.get_hdr_result_positive_no_appointment(num_days=14)
results = [pid[0] for pid in escalated_participants]
self.assertIn(pids[2], results)
self.assertIn(pids[3], results)
self.assertIn(pids[5], results)
self.assertNotIn(pids[0], results)
self.assertNotIn(pids[1], results)
self.assertNotIn(pids[4], results)
with GenomicJobController(GenomicJob.CHECK_GCR_OUTREACH_ESCALATION) as controller:
controller.check_gcr_escalation(controller.job_id)
self.assertEqual(email_mock.call_count, 3)
self.assertEqual(email_mock.call_args.args[0].subject, 'GCR Outreach 14 Day Escalation')
self.clear_table_after_test('genomic_gcr_outreach_escalation_notified')
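# A 403 from the email service should record the notification as not sent
# and post an alert to the Slack webhook.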
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_check_gcr_14day_escalation_error(self, email_mock):
email_mock.side_effect = ForbiddenError(mock.Mock(code=403))
mock_slack_handler = mock.MagicMock()
fake_date = parser.parse("2023-06-01T13:43:23")
config.override_setting(config.GENOMIC_GCR_ESCALATION_EMAILS, ['[email protected]'])
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
pids = []
for _ in range(2):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
set_member = self.data_generator.create_database_genomic_set_member(
participantId=summary.participantId,
genomicSetId=1,
biobankId=1001,
collectionTubeId=100,
sampleId=10,
genomeType="aou_wgs",
)
self.data_generator.create_database_genomic_member_report_state(
participant_id=summary.participantId,
genomic_report_state=GenomicReportState.HDR_RPT_POSITIVE,
genomic_set_member_id=set_member.id,
module='hdr_v1',
event_authored_time=fake_date
)
pids.append(summary.participantId)
self.data_generator.create_database_genomic_appointment(
message_record_id=102,
appointment_id=103,
event_type='appointment_completed',
module_type='hdr',
participant_id=pids[1],
event_authored_time=fake_date,
source='Color',
appointment_timestamp=fake_date,
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
with GenomicJobController(GenomicJob.CHECK_GCR_OUTREACH_ESCALATION) as controller:
controller.genomic_alert_slack = mock_slack_handler
controller.check_gcr_escalation(controller.job_id)
notified_dao = GenomicDefaultBaseDao(model_type=GenomicGCROutreachEscalationNotified)
with notified_dao.session() as session:
notification = session.query(
GenomicGCROutreachEscalationNotified
).filter(
GenomicGCROutreachEscalationNotified.participant_id == pids[0]
).one()
self.assertEqual(email_mock.call_count, 1)
self.assertEqual(mock_slack_handler.send_message_to_webhook.call_count, 1)
self.assertEqual(False, notification.message_sent)
self.clear_table_after_test('genomic_gcr_outreach_escalation_notified')
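# Same escalation scenario as above, but scoped to careevolution
# participants on the 30-day CE escalation job.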
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_check_gcr_ce_escalation(self, email_mock):
fake_date = parser.parse("2022-09-01T13:43:23")
fake_date2 = parser.parse("2022-09-02T14:14:00")
fake_date3 = parser.parse("2022-09-03T15:15:00")
config.override_setting(config.GENOMIC_GCR_ESCALATION_EMAILS, ['[email protected]'])
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
pids = []
for _ in range(6):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
set_member = self.data_generator.create_database_genomic_set_member(
participantId=summary.participantId,
genomicSetId=1,
biobankId=1001,
collectionTubeId=100,
sampleId=10,
genomeType="aou_wgs",
participantOrigin='careevolution'
)
self.data_generator.create_database_genomic_member_report_state(
participant_id=summary.participantId,
genomic_report_state=GenomicReportState.HDR_RPT_POSITIVE,
genomic_set_member_id=set_member.id,
module='hdr_v1',
event_authored_time=fake_date
)
pids.append(summary.participantId)
# Appointment scheduled in future: don't notify
self.data_generator.create_database_genomic_appointment(
message_record_id=101,
appointment_id=102,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=pids[0],
event_authored_time=fake_date,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
# Appointment completed: don't notify
self.data_generator.create_database_genomic_appointment(
message_record_id=102,
appointment_id=103,
event_type='appointment_completed',
module_type='hdr',
participant_id=pids[1],
event_authored_time=fake_date,
source='Color',
appointment_timestamp=fake_date,
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
# Appointment scheduled then canceled: notify
self.data_generator.create_database_genomic_appointment(
message_record_id=103,
appointment_id=104,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=pids[2],
event_authored_time=fake_date2,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_appointment(
message_record_id=104,
appointment_id=104,
event_type='appointment_cancelled',
module_type='hdr',
participant_id=pids[2],
event_authored_time=fake_date3,
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
notified_dao = GenomicDefaultBaseDao(model_type=GenomicGCROutreachEscalationNotified)
notified_dao.insert_bulk([{
'participant_id': pids[4],
'created': clock.CLOCK.now(),
'modified': clock.CLOCK.now(),
'message_sent': True
},{
'participant_id': pids[5],
'created': clock.CLOCK.now(),
'modified': clock.CLOCK.now(),
'message_sent': False
}])
with clock.FakeClock(parser.parse('2022-11-01T05:15:00')):
escalated_participants = self.report_state_dao.get_hdr_result_positive_no_appointment(
num_days=30,
participant_origin='careevolution'
)
results = [pid[0] for pid in escalated_participants]
self.assertIn(pids[2], results)
self.assertIn(pids[3], results)
self.assertIn(pids[5], results)
self.assertNotIn(pids[0], results)
self.assertNotIn(pids[1], results)
self.assertNotIn(pids[4], results)
with GenomicJobController(GenomicJob.CHECK_GCR_CE_OUTREACH_ESCALATION) as controller:
controller.check_gcr_escalation(controller.job_id)
self.assertEqual(email_mock.call_count, 3)
self.assertEqual(email_mock.call_args.args[0].subject, 'GCR Outreach 30 Day Escalation')
self.clear_table_after_test('genomic_gcr_outreach_escalation_notified')
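# The follow-up manifest cloud task should fire only when the preceding
# job run ended in SUCCESS.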
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_execute_auto_generation_from_last_run(self, cloud_task_mock):
with GenomicJobController(
GenomicJob.PR_PR_WORKFLOW
) as controller:
controller.job_result = GenomicSubProcessResult.ERROR
controller._end_run()
controller.execute_auto_generation_from_cloud_task()
last_job_run_status = self.job_run_dao.get_last_run_status_for_job_id(job_id=GenomicJob.PR_PR_WORKFLOW)
self.assertTrue(last_job_run_status is not None)
self.assertTrue(last_job_run_status[0] == GenomicSubProcessResult.ERROR)
# task SHOULD NOT be called
self.assertEqual(cloud_task_mock.called, False)
self.assertEqual(cloud_task_mock.call_count, 0)
with GenomicJobController(
GenomicJob.PR_PR_WORKFLOW
) as controller:
controller.job_result = GenomicSubProcessResult.SUCCESS
controller._end_run()
controller.execute_auto_generation_from_cloud_task()
last_job_run_status = self.job_run_dao.get_last_run_status_for_job_id(job_id=GenomicJob.PR_PR_WORKFLOW)
self.assertTrue(last_job_run_status is not None)
self.assertTrue(last_job_run_status[0] == GenomicSubProcessResult.SUCCESS)
# task SHOULD be called
self.assertEqual(cloud_task_mock.called, True)
self.assertTrue(cloud_task_mock.call_args[1].get('payload').get('manifest_type') == 'p0')
self.assertTrue(cloud_task_mock.call_args[1].get('task_queue') == 'genomic-generate-manifest')
all_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(all_job_runs), 2)
self.assertTrue(all(obj.runResult in [GenomicSubProcessResult.SUCCESS, GenomicSubProcessResult.ERROR] for obj
in all_job_runs))
self.assertTrue(all(obj.jobId == GenomicJob.PR_PR_WORKFLOW for obj in all_job_runs))
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
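# Each request/response model below follows the Tea SDK convention:
# plain attribute containers whose to_map()/from_map() methods convert
# between snake_case attributes and the API's PascalCase wire keys.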
class CreateCertificateRequest(TeaModel):
def __init__(self, domain=None, certificate=None, private_key=None, certificate_name=None, instance_id=None):
self.domain = domain # type: str
self.certificate = certificate # type: str
self.private_key = private_key # type: str
self.certificate_name = certificate_name # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate is not None:
result['Certificate'] = self.certificate
if self.private_key is not None:
result['PrivateKey'] = self.private_key
if self.certificate_name is not None:
result['CertificateName'] = self.certificate_name
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Certificate') is not None:
self.certificate = m.get('Certificate')
if m.get('PrivateKey') is not None:
self.private_key = m.get('PrivateKey')
if m.get('CertificateName') is not None:
self.certificate_name = m.get('CertificateName')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class CreateCertificateResponseBody(TeaModel):
def __init__(self, request_id=None, certificate_id=None):
self.request_id = request_id # type: str
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class CreateCertificateResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateCertificateResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateCertificateResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateCertificateResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateCertificateByCertificateIdRequest(TeaModel):
def __init__(self, domain=None, certificate_id=None, instance_id=None):
self.domain = domain # type: str
self.certificate_id = certificate_id # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateByCertificateIdRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class CreateCertificateByCertificateIdResponseBody(TeaModel):
def __init__(self, request_id=None, certificate_id=None):
self.request_id = request_id # type: str
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(CreateCertificateByCertificateIdResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class CreateCertificateByCertificateIdResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateCertificateByCertificateIdResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateCertificateByCertificateIdResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateCertificateByCertificateIdResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, source_ips=None, is_access_product=None,
access_header_mode=None, access_headers=None, load_balancing=None, log_headers=None, http_port=None, https_port=None,
http_2port=None, http_to_user_ip=None, https_redirect=None, cluster_type=None, resource_group_id=None,
connection_time=None, read_time=None, write_time=None, access_type=None, cloud_native_instances=None,
ip_follow_status=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.source_ips = source_ips # type: str
self.is_access_product = is_access_product # type: int
self.access_header_mode = access_header_mode # type: int
self.access_headers = access_headers # type: str
self.load_balancing = load_balancing # type: int
self.log_headers = log_headers # type: str
self.http_port = http_port # type: str
self.https_port = https_port # type: str
self.http_2port = http_2port # type: str
self.http_to_user_ip = http_to_user_ip # type: int
self.https_redirect = https_redirect # type: int
self.cluster_type = cluster_type # type: int
self.resource_group_id = resource_group_id # type: str
self.connection_time = connection_time # type: int
self.read_time = read_time # type: int
self.write_time = write_time # type: int
self.access_type = access_type # type: str
self.cloud_native_instances = cloud_native_instances # type: str
self.ip_follow_status = ip_follow_status # type: int
def validate(self):
pass
def to_map(self):
_map = super(CreateDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.source_ips is not None:
result['SourceIps'] = self.source_ips
if self.is_access_product is not None:
result['IsAccessProduct'] = self.is_access_product
if self.access_header_mode is not None:
result['AccessHeaderMode'] = self.access_header_mode
if self.access_headers is not None:
result['AccessHeaders'] = self.access_headers
if self.load_balancing is not None:
result['LoadBalancing'] = self.load_balancing
if self.log_headers is not None:
result['LogHeaders'] = self.log_headers
if self.http_port is not None:
result['HttpPort'] = self.http_port
if self.https_port is not None:
result['HttpsPort'] = self.https_port
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
if self.http_to_user_ip is not None:
result['HttpToUserIp'] = self.http_to_user_ip
if self.https_redirect is not None:
result['HttpsRedirect'] = self.https_redirect
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.connection_time is not None:
result['ConnectionTime'] = self.connection_time
if self.read_time is not None:
result['ReadTime'] = self.read_time
if self.write_time is not None:
result['WriteTime'] = self.write_time
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.cloud_native_instances is not None:
result['CloudNativeInstances'] = self.cloud_native_instances
if self.ip_follow_status is not None:
result['IpFollowStatus'] = self.ip_follow_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('SourceIps') is not None:
self.source_ips = m.get('SourceIps')
if m.get('IsAccessProduct') is not None:
self.is_access_product = m.get('IsAccessProduct')
if m.get('AccessHeaderMode') is not None:
self.access_header_mode = m.get('AccessHeaderMode')
if m.get('AccessHeaders') is not None:
self.access_headers = m.get('AccessHeaders')
if m.get('LoadBalancing') is not None:
self.load_balancing = m.get('LoadBalancing')
if m.get('LogHeaders') is not None:
self.log_headers = m.get('LogHeaders')
if m.get('HttpPort') is not None:
self.http_port = m.get('HttpPort')
if m.get('HttpsPort') is not None:
self.https_port = m.get('HttpsPort')
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
if m.get('HttpToUserIp') is not None:
self.http_to_user_ip = m.get('HttpToUserIp')
if m.get('HttpsRedirect') is not None:
self.https_redirect = m.get('HttpsRedirect')
if m.get('ClusterType') is not None:
self.cluster_type = m.get('ClusterType')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('ConnectionTime') is not None:
self.connection_time = m.get('ConnectionTime')
if m.get('ReadTime') is not None:
self.read_time = m.get('ReadTime')
if m.get('WriteTime') is not None:
self.write_time = m.get('WriteTime')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('CloudNativeInstances') is not None:
self.cloud_native_instances = m.get('CloudNativeInstances')
if m.get('IpFollowStatus') is not None:
self.ip_follow_status = m.get('IpFollowStatus')
return self
class CreateDomainResponseBody(TeaModel):
def __init__(self, request_id=None, cname=None):
self.request_id = request_id # type: str
self.cname = cname # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.cname is not None:
result['Cname'] = self.cname
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('Cname') is not None:
self.cname = m.get('Cname')
return self
class CreateDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class CreateProtectionModuleRuleRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule = rule # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateProtectionModuleRuleRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule is not None:
result['Rule'] = self.rule
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('Rule') is not None:
self.rule = m.get('Rule')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class CreateProtectionModuleRuleResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(CreateProtectionModuleRuleResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class CreateProtectionModuleRuleResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: CreateProtectionModuleRuleResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(CreateProtectionModuleRuleResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = CreateProtectionModuleRuleResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DeleteDomainResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteInstanceRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteInstanceRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DeleteInstanceResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteInstanceResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteInstanceResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteInstanceResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteInstanceResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteInstanceResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DeleteProtectionModuleRuleRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule_id=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule_id = rule_id # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteProtectionModuleRuleRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule_id is not None:
result['RuleId'] = self.rule_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DeleteProtectionModuleRuleResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DeleteProtectionModuleRuleResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DeleteProtectionModuleRuleResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DeleteProtectionModuleRuleResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DeleteProtectionModuleRuleResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DeleteProtectionModuleRuleResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeCertificatesRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertificatesRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DescribeCertificatesResponseBodyCertificates(TeaModel):
def __init__(self, certificate_name=None, common_name=None, sans=None, is_using=None, certificate_id=None):
self.certificate_name = certificate_name # type: str
self.common_name = common_name # type: str
self.sans = sans # type: list[str]
self.is_using = is_using # type: bool
self.certificate_id = certificate_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertificatesResponseBodyCertificates, self).to_map()
if _map is not None:
return _map
result = dict()
if self.certificate_name is not None:
result['CertificateName'] = self.certificate_name
if self.common_name is not None:
result['CommonName'] = self.common_name
if self.sans is not None:
result['Sans'] = self.sans
if self.is_using is not None:
result['IsUsing'] = self.is_using
if self.certificate_id is not None:
result['CertificateId'] = self.certificate_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('CertificateName') is not None:
self.certificate_name = m.get('CertificateName')
if m.get('CommonName') is not None:
self.common_name = m.get('CommonName')
if m.get('Sans') is not None:
self.sans = m.get('Sans')
if m.get('IsUsing') is not None:
self.is_using = m.get('IsUsing')
if m.get('CertificateId') is not None:
self.certificate_id = m.get('CertificateId')
return self
class DescribeCertificatesResponseBody(TeaModel):
def __init__(self, request_id=None, certificates=None):
self.request_id = request_id # type: str
self.certificates = certificates # type: list[DescribeCertificatesResponseBodyCertificates]
def validate(self):
if self.certificates:
for k in self.certificates:
if k:
k.validate()
def to_map(self):
_map = super(DescribeCertificatesResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
result['Certificates'] = []
if self.certificates is not None:
for k in self.certificates:
result['Certificates'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.certificates = []
if m.get('Certificates') is not None:
for k in m.get('Certificates'):
temp_model = DescribeCertificatesResponseBodyCertificates()
self.certificates.append(temp_model.from_map(k))
return self
class DescribeCertificatesResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeCertificatesResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeCertificatesResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeCertificatesResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeCertMatchStatusRequest(TeaModel):
def __init__(self, domain=None, certificate=None, private_key=None, instance_id=None):
self.domain = domain # type: str
self.certificate = certificate # type: str
self.private_key = private_key # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertMatchStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.certificate is not None:
result['Certificate'] = self.certificate
if self.private_key is not None:
result['PrivateKey'] = self.private_key
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Certificate') is not None:
self.certificate = m.get('Certificate')
if m.get('PrivateKey') is not None:
self.private_key = m.get('PrivateKey')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DescribeCertMatchStatusResponseBody(TeaModel):
def __init__(self, request_id=None, match_status=None):
self.request_id = request_id # type: str
self.match_status = match_status # type: bool
def validate(self):
pass
def to_map(self):
_map = super(DescribeCertMatchStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.match_status is not None:
result['MatchStatus'] = self.match_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('MatchStatus') is not None:
self.match_status = m.get('MatchStatus')
return self
class DescribeCertMatchStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeCertMatchStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeCertMatchStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeCertMatchStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs(TeaModel):
def __init__(self, protocol=None, ports=None):
self.protocol = protocol # type: str
self.ports = ports # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs, self).to_map()
if _map is not None:
return _map
result = dict()
if self.protocol is not None:
result['Protocol'] = self.protocol
if self.ports is not None:
result['Ports'] = self.ports
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Protocol') is not None:
self.protocol = m.get('Protocol')
if m.get('Ports') is not None:
self.ports = m.get('Ports')
return self
class DescribeDomainResponseBodyDomainCloudNativeInstances(TeaModel):
def __init__(self, protocol_port_configs=None, redirection_type_name=None, cloud_native_product_name=None,
instance_id=None, ipaddress_list=None):
self.protocol_port_configs = protocol_port_configs # type: list[DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs]
self.redirection_type_name = redirection_type_name # type: str
self.cloud_native_product_name = cloud_native_product_name # type: str
self.instance_id = instance_id # type: str
self.ipaddress_list = ipaddress_list # type: str
def validate(self):
if self.protocol_port_configs:
for k in self.protocol_port_configs:
if k:
k.validate()
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainCloudNativeInstances, self).to_map()
if _map is not None:
return _map
result = dict()
result['ProtocolPortConfigs'] = []
if self.protocol_port_configs is not None:
for k in self.protocol_port_configs:
result['ProtocolPortConfigs'].append(k.to_map() if k else None)
if self.redirection_type_name is not None:
result['RedirectionTypeName'] = self.redirection_type_name
if self.cloud_native_product_name is not None:
result['CloudNativeProductName'] = self.cloud_native_product_name
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.ipaddress_list is not None:
result['IPAddressList'] = self.ipaddress_list
return result
def from_map(self, m=None):
m = m or dict()
self.protocol_port_configs = []
if m.get('ProtocolPortConfigs') is not None:
for k in m.get('ProtocolPortConfigs'):
temp_model = DescribeDomainResponseBodyDomainCloudNativeInstancesProtocolPortConfigs()
self.protocol_port_configs.append(temp_model.from_map(k))
if m.get('RedirectionTypeName') is not None:
self.redirection_type_name = m.get('RedirectionTypeName')
if m.get('CloudNativeProductName') is not None:
self.cloud_native_product_name = m.get('CloudNativeProductName')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('IPAddressList') is not None:
self.ipaddress_list = m.get('IPAddressList')
return self
class DescribeDomainResponseBodyDomainLogHeaders(TeaModel):
def __init__(self, k=None, v=None):
self.k = k # type: str
self.v = v # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainResponseBodyDomainLogHeaders, self).to_map()
if _map is not None:
return _map
result = dict()
if self.k is not None:
result['k'] = self.k
if self.v is not None:
result['v'] = self.v
return result
def from_map(self, m=None):
m = m or dict()
if m.get('k') is not None:
self.k = m.get('k')
if m.get('v') is not None:
self.v = m.get('v')
return self
class DescribeDomainResponseBodyDomain(TeaModel):
def __init__(self, http_2port=None, cloud_native_instances=None, http_to_user_ip=None, http_port=None,
log_headers=None, is_access_product=None, access_headers=None, access_header_mode=None, https_redirect=None,
load_balancing=None, ip_follow_status=None, access_type=None, version=None, cluster_type=None, read_time=None,
write_time=None, resource_group_id=None, cname=None, source_ips=None, connection_time=None, https_port=None):
self.http_2port = http_2port # type: list[str]
self.cloud_native_instances = cloud_native_instances # type: list[DescribeDomainResponseBodyDomainCloudNativeInstances]
self.http_to_user_ip = http_to_user_ip # type: int
self.http_port = http_port # type: list[str]
self.log_headers = log_headers # type: list[DescribeDomainResponseBodyDomainLogHeaders]
self.is_access_product = is_access_product # type: int
self.access_headers = access_headers # type: list[str]
self.access_header_mode = access_header_mode # type: int
self.https_redirect = https_redirect # type: int
self.load_balancing = load_balancing # type: int
self.ip_follow_status = ip_follow_status # type: int
self.access_type = access_type # type: str
self.version = version # type: long
self.cluster_type = cluster_type # type: int
self.read_time = read_time # type: int
self.write_time = write_time # type: int
self.resource_group_id = resource_group_id # type: str
self.cname = cname # type: str
self.source_ips = source_ips # type: list[str]
self.connection_time = connection_time # type: int
self.https_port = https_port # type: list[str]
def validate(self):
if self.cloud_native_instances:
for k in self.cloud_native_instances:
if k:
k.validate()
if self.log_headers:
for k in self.log_headers:
if k:
k.validate()
def to_map(self):
_map = super(DescribeDomainResponseBodyDomain, self).to_map()
if _map is not None:
return _map
result = dict()
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
result['CloudNativeInstances'] = []
if self.cloud_native_instances is not None:
for k in self.cloud_native_instances:
result['CloudNativeInstances'].append(k.to_map() if k else None)
if self.http_to_user_ip is not None:
result['HttpToUserIp'] = self.http_to_user_ip
if self.http_port is not None:
result['HttpPort'] = self.http_port
result['LogHeaders'] = []
if self.log_headers is not None:
for k in self.log_headers:
result['LogHeaders'].append(k.to_map() if k else None)
if self.is_access_product is not None:
result['IsAccessProduct'] = self.is_access_product
if self.access_headers is not None:
result['AccessHeaders'] = self.access_headers
if self.access_header_mode is not None:
result['AccessHeaderMode'] = self.access_header_mode
if self.https_redirect is not None:
result['HttpsRedirect'] = self.https_redirect
if self.load_balancing is not None:
result['LoadBalancing'] = self.load_balancing
if self.ip_follow_status is not None:
result['IpFollowStatus'] = self.ip_follow_status
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.version is not None:
result['Version'] = self.version
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.read_time is not None:
result['ReadTime'] = self.read_time
if self.write_time is not None:
result['WriteTime'] = self.write_time
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.cname is not None:
result['Cname'] = self.cname
if self.source_ips is not None:
result['SourceIps'] = self.source_ips
if self.connection_time is not None:
result['ConnectionTime'] = self.connection_time
if self.https_port is not None:
result['HttpsPort'] = self.https_port
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
self.cloud_native_instances = []
if m.get('CloudNativeInstances') is not None:
for k in m.get('CloudNativeInstances'):
temp_model = DescribeDomainResponseBodyDomainCloudNativeInstances()
self.cloud_native_instances.append(temp_model.from_map(k))
if m.get('HttpToUserIp') is not None:
self.http_to_user_ip = m.get('HttpToUserIp')
if m.get('HttpPort') is not None:
self.http_port = m.get('HttpPort')
self.log_headers = []
if m.get('LogHeaders') is not None:
for k in m.get('LogHeaders'):
temp_model = DescribeDomainResponseBodyDomainLogHeaders()
self.log_headers.append(temp_model.from_map(k))
if m.get('IsAccessProduct') is not None:
self.is_access_product = m.get('IsAccessProduct')
if m.get('AccessHeaders') is not None:
self.access_headers = m.get('AccessHeaders')
if m.get('AccessHeaderMode') is not None:
self.access_header_mode = m.get('AccessHeaderMode')
if m.get('HttpsRedirect') is not None:
self.https_redirect = m.get('HttpsRedirect')
if m.get('LoadBalancing') is not None:
self.load_balancing = m.get('LoadBalancing')
if m.get('IpFollowStatus') is not None:
self.ip_follow_status = m.get('IpFollowStatus')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('ClusterType') is not None:
self.cluster_type = m.get('ClusterType')
if m.get('ReadTime') is not None:
self.read_time = m.get('ReadTime')
if m.get('WriteTime') is not None:
self.write_time = m.get('WriteTime')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('Cname') is not None:
self.cname = m.get('Cname')
if m.get('SourceIps') is not None:
self.source_ips = m.get('SourceIps')
if m.get('ConnectionTime') is not None:
self.connection_time = m.get('ConnectionTime')
if m.get('HttpsPort') is not None:
self.https_port = m.get('HttpsPort')
return self
class DescribeDomainResponseBody(TeaModel):
def __init__(self, request_id=None, domain=None):
self.request_id = request_id # type: str
self.domain = domain # type: DescribeDomainResponseBodyDomain
def validate(self):
if self.domain:
self.domain.validate()
def to_map(self):
_map = super(DescribeDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.domain is not None:
result['Domain'] = self.domain.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('Domain') is not None:
temp_model = DescribeDomainResponseBodyDomain()
self.domain = temp_model.from_map(m['Domain'])
return self
class DescribeDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
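# Hedged usage sketch (not part of the generated client code): these models are
# plain containers, so a DescribeDomain payload round-trips through from_map()
# and to_map(). The dict below is a hypothetical example payload, not real API
# output.
#
#     body = DescribeDomainResponseBody().from_map({
#         'RequestId': 'hypothetical-request-id',
#         'Domain': {'Cname': 'example.waf.aliyuncs.com', 'HttpPort': ['80']},
#     })
#     assert body.domain.cname == 'example.waf.aliyuncs.com'
#     assert body.to_map()['Domain']['HttpPort'] == ['80']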
class DescribeDomainAdvanceConfigsRequest(TeaModel):
def __init__(self, instance_id=None, domain_list=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.domain_list = domain_list # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainAdvanceConfigsRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain_list is not None:
result['DomainList'] = self.domain_list
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('DomainList') is not None:
self.domain_list = m.get('DomainList')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile(TeaModel):
def __init__(self, http_2port=None, ipv_6status=None, http_port=None, gslbstatus=None, rs=None,
vip_service_status=None, cluster_type=None, exclusive_vip_status=None, cname=None, cert_status=None, https_port=None,
resolved_type=None):
self.http_2port = http_2port # type: str
self.ipv_6status = ipv_6status # type: int
self.http_port = http_port # type: str
self.gslbstatus = gslbstatus # type: str
self.rs = rs # type: str
self.vip_service_status = vip_service_status # type: int
self.cluster_type = cluster_type # type: int
self.exclusive_vip_status = exclusive_vip_status # type: int
self.cname = cname # type: str
self.cert_status = cert_status # type: int
self.https_port = https_port # type: str
self.resolved_type = resolved_type # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile, self).to_map()
if _map is not None:
return _map
result = dict()
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
if self.ipv_6status is not None:
result['Ipv6Status'] = self.ipv_6status
if self.http_port is not None:
result['HttpPort'] = self.http_port
if self.gslbstatus is not None:
result['GSLBStatus'] = self.gslbstatus
if self.rs is not None:
result['Rs'] = self.rs
if self.vip_service_status is not None:
result['VipServiceStatus'] = self.vip_service_status
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.exclusive_vip_status is not None:
result['ExclusiveVipStatus'] = self.exclusive_vip_status
if self.cname is not None:
result['Cname'] = self.cname
if self.cert_status is not None:
result['CertStatus'] = self.cert_status
if self.https_port is not None:
result['HttpsPort'] = self.https_port
if self.resolved_type is not None:
result['ResolvedType'] = self.resolved_type
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
if m.get('Ipv6Status') is not None:
self.ipv_6status = m.get('Ipv6Status')
if m.get('HttpPort') is not None:
self.http_port = m.get('HttpPort')
if m.get('GSLBStatus') is not None:
self.gslbstatus = m.get('GSLBStatus')
if m.get('Rs') is not None:
self.rs = m.get('Rs')
if m.get('VipServiceStatus') is not None:
self.vip_service_status = m.get('VipServiceStatus')
if m.get('ClusterType') is not None:
self.cluster_type = m.get('ClusterType')
if m.get('ExclusiveVipStatus') is not None:
self.exclusive_vip_status = m.get('ExclusiveVipStatus')
if m.get('Cname') is not None:
self.cname = m.get('Cname')
if m.get('CertStatus') is not None:
self.cert_status = m.get('CertStatus')
if m.get('HttpsPort') is not None:
self.https_port = m.get('HttpsPort')
if m.get('ResolvedType') is not None:
self.resolved_type = m.get('ResolvedType')
return self
class DescribeDomainAdvanceConfigsResponseBodyDomainConfigs(TeaModel):
def __init__(self, profile=None, domain=None):
self.profile = profile # type: DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile
self.domain = domain # type: str
def validate(self):
if self.profile:
self.profile.validate()
def to_map(self):
_map = super(DescribeDomainAdvanceConfigsResponseBodyDomainConfigs, self).to_map()
if _map is not None:
return _map
result = dict()
if self.profile is not None:
result['Profile'] = self.profile.to_map()
if self.domain is not None:
result['Domain'] = self.domain
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Profile') is not None:
temp_model = DescribeDomainAdvanceConfigsResponseBodyDomainConfigsProfile()
self.profile = temp_model.from_map(m['Profile'])
if m.get('Domain') is not None:
self.domain = m.get('Domain')
return self
class DescribeDomainAdvanceConfigsResponseBody(TeaModel):
def __init__(self, request_id=None, domain_configs=None):
self.request_id = request_id # type: str
self.domain_configs = domain_configs # type: list[DescribeDomainAdvanceConfigsResponseBodyDomainConfigs]
def validate(self):
if self.domain_configs:
for k in self.domain_configs:
if k:
k.validate()
def to_map(self):
        _map = super(DescribeDomainAdvanceConfigsResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
result['DomainConfigs'] = []
if self.domain_configs is not None:
for k in self.domain_configs:
result['DomainConfigs'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.domain_configs = []
if m.get('DomainConfigs') is not None:
for k in m.get('DomainConfigs'):
temp_model = DescribeDomainAdvanceConfigsResponseBodyDomainConfigs()
self.domain_configs.append(temp_model.from_map(k))
return self
class DescribeDomainAdvanceConfigsResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainAdvanceConfigsResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainAdvanceConfigsResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainAdvanceConfigsResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeDomainBasicConfigsRequest(TeaModel):
def __init__(self, instance_id=None, domain_key=None, access_type=None, cloud_native_product_id=None,
page_number=None, page_size=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.domain_key = domain_key # type: str
self.access_type = access_type # type: str
self.cloud_native_product_id = cloud_native_product_id # type: int
self.page_number = page_number # type: int
self.page_size = page_size # type: int
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainBasicConfigsRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain_key is not None:
result['DomainKey'] = self.domain_key
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.cloud_native_product_id is not None:
result['CloudNativeProductId'] = self.cloud_native_product_id
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('DomainKey') is not None:
self.domain_key = m.get('DomainKey')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('CloudNativeProductId') is not None:
self.cloud_native_product_id = m.get('CloudNativeProductId')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeDomainBasicConfigsResponseBodyDomainConfigs(TeaModel):
def __init__(self, status=None, domain=None, owner=None, cc_mode=None, cc_status=None, access_type=None,
version=None, acl_status=None, waf_status=None, waf_mode=None):
self.status = status # type: int
self.domain = domain # type: str
self.owner = owner # type: str
self.cc_mode = cc_mode # type: int
self.cc_status = cc_status # type: int
self.access_type = access_type # type: str
self.version = version # type: long
self.acl_status = acl_status # type: int
self.waf_status = waf_status # type: int
self.waf_mode = waf_mode # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainBasicConfigsResponseBodyDomainConfigs, self).to_map()
if _map is not None:
return _map
result = dict()
if self.status is not None:
result['Status'] = self.status
if self.domain is not None:
result['Domain'] = self.domain
if self.owner is not None:
result['Owner'] = self.owner
if self.cc_mode is not None:
result['CcMode'] = self.cc_mode
if self.cc_status is not None:
result['CcStatus'] = self.cc_status
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.version is not None:
result['Version'] = self.version
if self.acl_status is not None:
result['AclStatus'] = self.acl_status
if self.waf_status is not None:
result['WafStatus'] = self.waf_status
if self.waf_mode is not None:
result['WafMode'] = self.waf_mode
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Status') is not None:
self.status = m.get('Status')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Owner') is not None:
self.owner = m.get('Owner')
if m.get('CcMode') is not None:
self.cc_mode = m.get('CcMode')
if m.get('CcStatus') is not None:
self.cc_status = m.get('CcStatus')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('AclStatus') is not None:
self.acl_status = m.get('AclStatus')
if m.get('WafStatus') is not None:
self.waf_status = m.get('WafStatus')
if m.get('WafMode') is not None:
self.waf_mode = m.get('WafMode')
return self
class DescribeDomainBasicConfigsResponseBody(TeaModel):
def __init__(self, total_count=None, request_id=None, domain_configs=None):
self.total_count = total_count # type: int
self.request_id = request_id # type: str
self.domain_configs = domain_configs # type: list[DescribeDomainBasicConfigsResponseBodyDomainConfigs]
def validate(self):
if self.domain_configs:
for k in self.domain_configs:
if k:
k.validate()
def to_map(self):
_map = super(DescribeDomainBasicConfigsResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.total_count is not None:
result['TotalCount'] = self.total_count
if self.request_id is not None:
result['RequestId'] = self.request_id
result['DomainConfigs'] = []
if self.domain_configs is not None:
for k in self.domain_configs:
result['DomainConfigs'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('TotalCount') is not None:
self.total_count = m.get('TotalCount')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.domain_configs = []
if m.get('DomainConfigs') is not None:
for k in m.get('DomainConfigs'):
temp_model = DescribeDomainBasicConfigsResponseBodyDomainConfigs()
self.domain_configs.append(temp_model.from_map(k))
return self
class DescribeDomainBasicConfigsResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainBasicConfigsResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainBasicConfigsResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainBasicConfigsResponseBody()
self.body = temp_model.from_map(m['body'])
return self
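# Hedged usage sketch (assumption, not generated code): DescribeDomainBasicConfigs
# is paged, so a caller typically fills PageNumber/PageSize on the request model
# and lets to_map() produce the wire-format dict. Field values are hypothetical.
#
#     req = DescribeDomainBasicConfigsRequest(page_number=1, page_size=10)
#     req.instance_id = 'waf-hypothetical-instance'
#     assert req.to_map() == {'InstanceId': 'waf-hypothetical-instance',
#                             'PageNumber': 1, 'PageSize': 10}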
class DescribeDomainListRequest(TeaModel):
def __init__(self, resource_group_id=None, instance_id=None, domain_name=None, page_number=None, page_size=None,
is_sub=None, domain_names=None):
self.resource_group_id = resource_group_id # type: str
self.instance_id = instance_id # type: str
self.domain_name = domain_name # type: str
self.page_number = page_number # type: int
self.page_size = page_size # type: int
self.is_sub = is_sub # type: int
self.domain_names = domain_names # type: list[str]
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainListRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain_name is not None:
result['DomainName'] = self.domain_name
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.is_sub is not None:
result['IsSub'] = self.is_sub
if self.domain_names is not None:
result['DomainNames'] = self.domain_names
return result
def from_map(self, m=None):
m = m or dict()
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('DomainName') is not None:
self.domain_name = m.get('DomainName')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('IsSub') is not None:
self.is_sub = m.get('IsSub')
if m.get('DomainNames') is not None:
self.domain_names = m.get('DomainNames')
return self
class DescribeDomainListResponseBody(TeaModel):
def __init__(self, total_count=None, request_id=None, domain_names=None):
self.total_count = total_count # type: int
self.request_id = request_id # type: str
self.domain_names = domain_names # type: list[str]
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainListResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.total_count is not None:
result['TotalCount'] = self.total_count
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.domain_names is not None:
result['DomainNames'] = self.domain_names
return result
def from_map(self, m=None):
m = m or dict()
if m.get('TotalCount') is not None:
self.total_count = m.get('TotalCount')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('DomainNames') is not None:
self.domain_names = m.get('DomainNames')
return self
class DescribeDomainListResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainListResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainListResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainListResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeDomainNamesRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainNamesRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeDomainNamesResponseBody(TeaModel):
def __init__(self, request_id=None, domain_names=None):
self.request_id = request_id # type: str
self.domain_names = domain_names # type: list[str]
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainNamesResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.domain_names is not None:
result['DomainNames'] = self.domain_names
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('DomainNames') is not None:
self.domain_names = m.get('DomainNames')
return self
class DescribeDomainNamesResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainNamesResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainNamesResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainNamesResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeDomainRuleGroupRequest(TeaModel):
def __init__(self, domain=None, instance_id=None):
self.domain = domain # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainRuleGroupRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DescribeDomainRuleGroupResponseBody(TeaModel):
def __init__(self, rule_group_id=None, request_id=None):
self.rule_group_id = rule_group_id # type: long
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeDomainRuleGroupResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.rule_group_id is not None:
result['RuleGroupId'] = self.rule_group_id
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RuleGroupId') is not None:
self.rule_group_id = m.get('RuleGroupId')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class DescribeDomainRuleGroupResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeDomainRuleGroupResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeDomainRuleGroupResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeDomainRuleGroupResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeInstanceInfoRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceInfoRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeInstanceInfoResponseBodyInstanceInfo(TeaModel):
def __init__(self, status=None, end_date=None, version=None, remain_day=None, region=None, pay_type=None,
in_debt=None, instance_id=None, subscription_type=None, trial=None):
self.status = status # type: int
self.end_date = end_date # type: long
self.version = version # type: str
self.remain_day = remain_day # type: int
self.region = region # type: str
self.pay_type = pay_type # type: int
self.in_debt = in_debt # type: int
self.instance_id = instance_id # type: str
self.subscription_type = subscription_type # type: str
self.trial = trial # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceInfoResponseBodyInstanceInfo, self).to_map()
if _map is not None:
return _map
result = dict()
if self.status is not None:
result['Status'] = self.status
if self.end_date is not None:
result['EndDate'] = self.end_date
if self.version is not None:
result['Version'] = self.version
if self.remain_day is not None:
result['RemainDay'] = self.remain_day
if self.region is not None:
result['Region'] = self.region
if self.pay_type is not None:
result['PayType'] = self.pay_type
if self.in_debt is not None:
result['InDebt'] = self.in_debt
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.subscription_type is not None:
result['SubscriptionType'] = self.subscription_type
if self.trial is not None:
result['Trial'] = self.trial
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Status') is not None:
self.status = m.get('Status')
if m.get('EndDate') is not None:
self.end_date = m.get('EndDate')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('RemainDay') is not None:
self.remain_day = m.get('RemainDay')
if m.get('Region') is not None:
self.region = m.get('Region')
if m.get('PayType') is not None:
self.pay_type = m.get('PayType')
if m.get('InDebt') is not None:
self.in_debt = m.get('InDebt')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('SubscriptionType') is not None:
self.subscription_type = m.get('SubscriptionType')
if m.get('Trial') is not None:
self.trial = m.get('Trial')
return self
class DescribeInstanceInfoResponseBody(TeaModel):
def __init__(self, request_id=None, instance_info=None):
self.request_id = request_id # type: str
self.instance_info = instance_info # type: DescribeInstanceInfoResponseBodyInstanceInfo
def validate(self):
if self.instance_info:
self.instance_info.validate()
def to_map(self):
_map = super(DescribeInstanceInfoResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.instance_info is not None:
result['InstanceInfo'] = self.instance_info.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('InstanceInfo') is not None:
temp_model = DescribeInstanceInfoResponseBodyInstanceInfo()
self.instance_info = temp_model.from_map(m['InstanceInfo'])
return self
class DescribeInstanceInfoResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeInstanceInfoResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeInstanceInfoResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeInstanceInfoResponseBody()
self.body = temp_model.from_map(m['body'])
return self
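# Hedged usage sketch (hypothetical values): the nested InstanceInfo model is
# reached through the response body, e.g. after a DescribeInstanceInfo call:
#
#     body = DescribeInstanceInfoResponseBody().from_map({
#         'RequestId': 'hypothetical-request-id',
#         'InstanceInfo': {'InstanceId': 'waf-hypothetical', 'PayType': 1},
#     })
#     assert body.instance_info.pay_type == 1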
class DescribeInstanceInfosRequest(TeaModel):
def __init__(self, instance_source=None, instance_id=None, resource_group_id=None):
self.instance_source = instance_source # type: str
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceInfosRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_source is not None:
result['InstanceSource'] = self.instance_source
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceSource') is not None:
self.instance_source = m.get('InstanceSource')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeInstanceInfosResponseBodyInstanceInfos(TeaModel):
def __init__(self, status=None, end_date=None, remain_day=None, region=None, pay_type=None, in_debt=None,
instance_id=None, subscription_type=None, trial=None):
self.status = status # type: int
self.end_date = end_date # type: long
self.remain_day = remain_day # type: int
self.region = region # type: str
self.pay_type = pay_type # type: int
self.in_debt = in_debt # type: int
self.instance_id = instance_id # type: str
self.subscription_type = subscription_type # type: str
self.trial = trial # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceInfosResponseBodyInstanceInfos, self).to_map()
if _map is not None:
return _map
result = dict()
if self.status is not None:
result['Status'] = self.status
if self.end_date is not None:
result['EndDate'] = self.end_date
if self.remain_day is not None:
result['RemainDay'] = self.remain_day
if self.region is not None:
result['Region'] = self.region
if self.pay_type is not None:
result['PayType'] = self.pay_type
if self.in_debt is not None:
result['InDebt'] = self.in_debt
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.subscription_type is not None:
result['SubscriptionType'] = self.subscription_type
if self.trial is not None:
result['Trial'] = self.trial
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Status') is not None:
self.status = m.get('Status')
if m.get('EndDate') is not None:
self.end_date = m.get('EndDate')
if m.get('RemainDay') is not None:
self.remain_day = m.get('RemainDay')
if m.get('Region') is not None:
self.region = m.get('Region')
if m.get('PayType') is not None:
self.pay_type = m.get('PayType')
if m.get('InDebt') is not None:
self.in_debt = m.get('InDebt')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('SubscriptionType') is not None:
self.subscription_type = m.get('SubscriptionType')
if m.get('Trial') is not None:
self.trial = m.get('Trial')
return self
class DescribeInstanceInfosResponseBody(TeaModel):
def __init__(self, request_id=None, instance_infos=None):
self.request_id = request_id # type: str
self.instance_infos = instance_infos # type: list[DescribeInstanceInfosResponseBodyInstanceInfos]
def validate(self):
if self.instance_infos:
for k in self.instance_infos:
if k:
k.validate()
def to_map(self):
_map = super(DescribeInstanceInfosResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
result['InstanceInfos'] = []
if self.instance_infos is not None:
for k in self.instance_infos:
result['InstanceInfos'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.instance_infos = []
if m.get('InstanceInfos') is not None:
for k in m.get('InstanceInfos'):
temp_model = DescribeInstanceInfosResponseBodyInstanceInfos()
self.instance_infos.append(temp_model.from_map(k))
return self
class DescribeInstanceInfosResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeInstanceInfosResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeInstanceInfosResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeInstanceInfosResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeInstanceSpecInfoRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceSpecInfoRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos(TeaModel):
def __init__(self, value=None, code=None):
self.value = value # type: str
self.code = code # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos, self).to_map()
if _map is not None:
return _map
result = dict()
if self.value is not None:
result['Value'] = self.value
if self.code is not None:
result['Code'] = self.code
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Value') is not None:
self.value = m.get('Value')
if m.get('Code') is not None:
self.code = m.get('Code')
return self
class DescribeInstanceSpecInfoResponseBody(TeaModel):
def __init__(self, instance_spec_infos=None, request_id=None, instance_id=None, version=None, expire_time=None):
self.instance_spec_infos = instance_spec_infos # type: list[DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos]
self.request_id = request_id # type: str
self.instance_id = instance_id # type: str
self.version = version # type: str
self.expire_time = expire_time # type: long
def validate(self):
if self.instance_spec_infos:
for k in self.instance_spec_infos:
if k:
k.validate()
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
result['InstanceSpecInfos'] = []
if self.instance_spec_infos is not None:
for k in self.instance_spec_infos:
result['InstanceSpecInfos'].append(k.to_map() if k else None)
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.version is not None:
result['Version'] = self.version
if self.expire_time is not None:
result['ExpireTime'] = self.expire_time
return result
def from_map(self, m=None):
m = m or dict()
self.instance_spec_infos = []
if m.get('InstanceSpecInfos') is not None:
for k in m.get('InstanceSpecInfos'):
temp_model = DescribeInstanceSpecInfoResponseBodyInstanceSpecInfos()
self.instance_spec_infos.append(temp_model.from_map(k))
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('ExpireTime') is not None:
self.expire_time = m.get('ExpireTime')
return self
class DescribeInstanceSpecInfoResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeInstanceSpecInfoResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeInstanceSpecInfoResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeInstanceSpecInfoResponseBody()
self.body = temp_model.from_map(m['body'])
return self
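# Hedged usage sketch (hypothetical payload; the 'qps' spec code is an assumed
# example, not documented here): InstanceSpecInfos is a list of Value/Code
# pairs, so iteration goes through the typed child models:
#
#     body = DescribeInstanceSpecInfoResponseBody().from_map({
#         'InstanceSpecInfos': [{'Code': 'qps', 'Value': '5000'}],
#     })
#     specs = {k.code: k.value for k in body.instance_spec_infos}
#     assert specs == {'qps': '5000'}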
class DescribeLogServiceStatusRequest(TeaModel):
def __init__(self, instance_id=None, region=None, resource_group_id=None, page_number=None, page_size=None,
domain_names=None):
self.instance_id = instance_id # type: str
self.region = region # type: str
self.resource_group_id = resource_group_id # type: str
self.page_number = page_number # type: int
self.page_size = page_size # type: int
self.domain_names = domain_names # type: list[str]
def validate(self):
pass
def to_map(self):
_map = super(DescribeLogServiceStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.region is not None:
result['Region'] = self.region
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.domain_names is not None:
result['DomainNames'] = self.domain_names
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Region') is not None:
self.region = m.get('Region')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('DomainNames') is not None:
self.domain_names = m.get('DomainNames')
return self
class DescribeLogServiceStatusResponseBodyDomainStatus(TeaModel):
def __init__(self, domain=None, sls_log_active=None):
self.domain = domain # type: str
self.sls_log_active = sls_log_active # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeLogServiceStatusResponseBodyDomainStatus, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.sls_log_active is not None:
result['SlsLogActive'] = self.sls_log_active
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('SlsLogActive') is not None:
self.sls_log_active = m.get('SlsLogActive')
return self
class DescribeLogServiceStatusResponseBody(TeaModel):
def __init__(self, total_count=None, request_id=None, domain_status=None):
self.total_count = total_count # type: int
self.request_id = request_id # type: str
self.domain_status = domain_status # type: list[DescribeLogServiceStatusResponseBodyDomainStatus]
def validate(self):
if self.domain_status:
for k in self.domain_status:
if k:
k.validate()
def to_map(self):
_map = super(DescribeLogServiceStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.total_count is not None:
result['TotalCount'] = self.total_count
if self.request_id is not None:
result['RequestId'] = self.request_id
result['DomainStatus'] = []
if self.domain_status is not None:
for k in self.domain_status:
result['DomainStatus'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('TotalCount') is not None:
self.total_count = m.get('TotalCount')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.domain_status = []
if m.get('DomainStatus') is not None:
for k in m.get('DomainStatus'):
temp_model = DescribeLogServiceStatusResponseBodyDomainStatus()
self.domain_status.append(temp_model.from_map(k))
return self
class DescribeLogServiceStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeLogServiceStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeLogServiceStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeLogServiceStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeProtectionModuleCodeConfigRequest(TeaModel):
def __init__(self, source_ip=None, lang=None, code_type=None, code_value=None, instance_id=None,
resource_group_id=None):
self.source_ip = source_ip # type: str
self.lang = lang # type: str
self.code_type = code_type # type: int
self.code_value = code_value # type: int
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.source_ip is not None:
result['SourceIp'] = self.source_ip
if self.lang is not None:
result['Lang'] = self.lang
if self.code_type is not None:
result['CodeType'] = self.code_type
if self.code_value is not None:
result['CodeValue'] = self.code_value
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('SourceIp') is not None:
self.source_ip = m.get('SourceIp')
if m.get('Lang') is not None:
self.lang = m.get('Lang')
if m.get('CodeType') is not None:
self.code_type = m.get('CodeType')
if m.get('CodeValue') is not None:
self.code_value = m.get('CodeValue')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeProtectionModuleCodeConfigResponseBody(TeaModel):
def __init__(self, request_id=None, code_configs=None):
self.request_id = request_id # type: str
self.code_configs = code_configs # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.code_configs is not None:
result['CodeConfigs'] = self.code_configs
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('CodeConfigs') is not None:
self.code_configs = m.get('CodeConfigs')
return self
class DescribeProtectionModuleCodeConfigResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeProtectionModuleCodeConfigResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeProtectionModuleCodeConfigResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeProtectionModuleCodeConfigResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeProtectionModuleModeRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, instance_id=None, resource_group_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleModeRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeProtectionModuleModeResponseBody(TeaModel):
def __init__(self, learn_status=None, request_id=None, mode=None):
self.learn_status = learn_status # type: int
self.request_id = request_id # type: str
self.mode = mode # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleModeResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.learn_status is not None:
result['LearnStatus'] = self.learn_status
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.mode is not None:
result['Mode'] = self.mode
return result
def from_map(self, m=None):
m = m or dict()
if m.get('LearnStatus') is not None:
self.learn_status = m.get('LearnStatus')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('Mode') is not None:
self.mode = m.get('Mode')
return self
class DescribeProtectionModuleModeResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeProtectionModuleModeResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeProtectionModuleModeResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeProtectionModuleModeResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeProtectionModuleRulesRequest(TeaModel):
def __init__(self, page_size=None, page_number=None, domain=None, defense_type=None, query=None, lang=None,
instance_id=None, resource_group_id=None):
self.page_size = page_size # type: int
self.page_number = page_number # type: int
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.query = query # type: str
self.lang = lang # type: str
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleRulesRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.query is not None:
result['Query'] = self.query
if self.lang is not None:
result['Lang'] = self.lang
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('Query') is not None:
self.query = m.get('Query')
if m.get('Lang') is not None:
self.lang = m.get('Lang')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeProtectionModuleRulesResponseBodyRules(TeaModel):
def __init__(self, status=None, time=None, version=None, content=None, rule_id=None):
self.status = status # type: long
self.time = time # type: long
self.version = version # type: long
self.content = content # type: dict[str, any]
self.rule_id = rule_id # type: long
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleRulesResponseBodyRules, self).to_map()
if _map is not None:
return _map
result = dict()
if self.status is not None:
result['Status'] = self.status
if self.time is not None:
result['Time'] = self.time
if self.version is not None:
result['Version'] = self.version
if self.content is not None:
result['Content'] = self.content
if self.rule_id is not None:
result['RuleId'] = self.rule_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Status') is not None:
self.status = m.get('Status')
if m.get('Time') is not None:
self.time = m.get('Time')
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('Content') is not None:
self.content = m.get('Content')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
return self
class DescribeProtectionModuleRulesResponseBody(TeaModel):
def __init__(self, total_count=None, request_id=None, rules=None):
self.total_count = total_count # type: int
self.request_id = request_id # type: str
self.rules = rules # type: list[DescribeProtectionModuleRulesResponseBodyRules]
def validate(self):
if self.rules:
for k in self.rules:
if k:
k.validate()
def to_map(self):
_map = super(DescribeProtectionModuleRulesResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.total_count is not None:
result['TotalCount'] = self.total_count
if self.request_id is not None:
result['RequestId'] = self.request_id
        result['Rules'] = []
        if self.rules is not None:
for k in self.rules:
result['Rules'].append(k.to_map() if k else None)
return result
def from_map(self, m=None):
m = m or dict()
if m.get('TotalCount') is not None:
self.total_count = m.get('TotalCount')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.rules = []
if m.get('Rules') is not None:
for k in m.get('Rules'):
temp_model = DescribeProtectionModuleRulesResponseBodyRules()
self.rules.append(temp_model.from_map(k))
return self
class DescribeProtectionModuleRulesResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeProtectionModuleRulesResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeProtectionModuleRulesResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeProtectionModuleRulesResponseBody()
self.body = temp_model.from_map(m['body'])
return self
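# Hedged usage sketch (hypothetical rule payload): each rule's Content arrives
# as a free-form dict, so callers usually inspect it per rule after from_map():
#
#     body = DescribeProtectionModuleRulesResponseBody().from_map({
#         'TotalCount': 1,
#         'Rules': [{'RuleId': 42, 'Status': 1, 'Content': {'action': 'block'}}],
#     })
#     for rule in body.rules:
#         print(rule.rule_id, rule.content)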
class DescribeProtectionModuleStatusRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class DescribeProtectionModuleStatusResponseBody(TeaModel):
def __init__(self, request_id=None, module_status=None):
self.request_id = request_id # type: str
self.module_status = module_status # type: int
def validate(self):
pass
def to_map(self):
_map = super(DescribeProtectionModuleStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.module_status is not None:
result['ModuleStatus'] = self.module_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('ModuleStatus') is not None:
self.module_status = m.get('ModuleStatus')
return self
class DescribeProtectionModuleStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeProtectionModuleStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeProtectionModuleStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeProtectionModuleStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class DescribeWafSourceIpSegmentRequest(TeaModel):
def __init__(self, instance_id=None, resource_group_id=None):
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeWafSourceIpSegmentRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class DescribeWafSourceIpSegmentResponseBody(TeaModel):
def __init__(self, request_id=None, ip_v6s=None, ips=None):
self.request_id = request_id # type: str
self.ip_v6s = ip_v6s # type: str
self.ips = ips # type: str
def validate(self):
pass
def to_map(self):
_map = super(DescribeWafSourceIpSegmentResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.ip_v6s is not None:
result['IpV6s'] = self.ip_v6s
if self.ips is not None:
result['Ips'] = self.ips
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('IpV6s') is not None:
self.ip_v6s = m.get('IpV6s')
if m.get('Ips') is not None:
self.ips = m.get('Ips')
return self
class DescribeWafSourceIpSegmentResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: DescribeWafSourceIpSegmentResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(DescribeWafSourceIpSegmentResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeWafSourceIpSegmentResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyDomainRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, source_ips=None, load_balancing=None, http_port=None,
https_port=None, http_2port=None, https_redirect=None, http_to_user_ip=None, is_access_product=None,
log_headers=None, cluster_type=None, connection_time=None, read_time=None, write_time=None, access_type=None,
cloud_native_instances=None, ip_follow_status=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.source_ips = source_ips # type: str
self.load_balancing = load_balancing # type: int
self.http_port = http_port # type: str
self.https_port = https_port # type: str
self.http_2port = http_2port # type: str
self.https_redirect = https_redirect # type: int
self.http_to_user_ip = http_to_user_ip # type: int
self.is_access_product = is_access_product # type: int
self.log_headers = log_headers # type: str
self.cluster_type = cluster_type # type: int
self.connection_time = connection_time # type: int
self.read_time = read_time # type: int
self.write_time = write_time # type: int
self.access_type = access_type # type: str
self.cloud_native_instances = cloud_native_instances # type: str
self.ip_follow_status = ip_follow_status # type: int
def validate(self):
pass
def to_map(self):
_map = super(ModifyDomainRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.source_ips is not None:
result['SourceIps'] = self.source_ips
if self.load_balancing is not None:
result['LoadBalancing'] = self.load_balancing
if self.http_port is not None:
result['HttpPort'] = self.http_port
if self.https_port is not None:
result['HttpsPort'] = self.https_port
if self.http_2port is not None:
result['Http2Port'] = self.http_2port
if self.https_redirect is not None:
result['HttpsRedirect'] = self.https_redirect
if self.http_to_user_ip is not None:
result['HttpToUserIp'] = self.http_to_user_ip
if self.is_access_product is not None:
result['IsAccessProduct'] = self.is_access_product
if self.log_headers is not None:
result['LogHeaders'] = self.log_headers
if self.cluster_type is not None:
result['ClusterType'] = self.cluster_type
if self.connection_time is not None:
result['ConnectionTime'] = self.connection_time
if self.read_time is not None:
result['ReadTime'] = self.read_time
if self.write_time is not None:
result['WriteTime'] = self.write_time
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.cloud_native_instances is not None:
result['CloudNativeInstances'] = self.cloud_native_instances
if self.ip_follow_status is not None:
result['IpFollowStatus'] = self.ip_follow_status
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('SourceIps') is not None:
self.source_ips = m.get('SourceIps')
if m.get('LoadBalancing') is not None:
self.load_balancing = m.get('LoadBalancing')
if m.get('HttpPort') is not None:
self.http_port = m.get('HttpPort')
if m.get('HttpsPort') is not None:
self.https_port = m.get('HttpsPort')
if m.get('Http2Port') is not None:
self.http_2port = m.get('Http2Port')
if m.get('HttpsRedirect') is not None:
self.https_redirect = m.get('HttpsRedirect')
if m.get('HttpToUserIp') is not None:
self.http_to_user_ip = m.get('HttpToUserIp')
if m.get('IsAccessProduct') is not None:
self.is_access_product = m.get('IsAccessProduct')
if m.get('LogHeaders') is not None:
self.log_headers = m.get('LogHeaders')
if m.get('ClusterType') is not None:
self.cluster_type = m.get('ClusterType')
if m.get('ConnectionTime') is not None:
self.connection_time = m.get('ConnectionTime')
if m.get('ReadTime') is not None:
self.read_time = m.get('ReadTime')
if m.get('WriteTime') is not None:
self.write_time = m.get('WriteTime')
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('CloudNativeInstances') is not None:
self.cloud_native_instances = m.get('CloudNativeInstances')
if m.get('IpFollowStatus') is not None:
self.ip_follow_status = m.get('IpFollowStatus')
return self
class ModifyDomainResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyDomainResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyDomainResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyDomainResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyDomainResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyDomainResponseBody()
self.body = temp_model.from_map(m['body'])
return self
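# Hypothetical usage sketch: building a ModifyDomainRequest and serializing
# it to the wire-format dict a Tea client would send. Only fields that were
# set appear in the resulting map; the sample values below are assumptions.
def _example_modify_domain_request():
    req = ModifyDomainRequest(
        instance_id='waf-example-instance',
        domain='example.com',
        source_ips='["192.0.2.10"]',
        http_port='["80"]',
        https_port='["443"]',
    )
    return req.to_map()  # e.g. {'InstanceId': ..., 'Domain': ..., ...}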
class ModifyDomainIpv6StatusRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, enabled=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.enabled = enabled # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyDomainIpv6StatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.enabled is not None:
result['Enabled'] = self.enabled
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Enabled') is not None:
self.enabled = m.get('Enabled')
return self
class ModifyDomainIpv6StatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyDomainIpv6StatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyDomainIpv6StatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyDomainIpv6StatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyDomainIpv6StatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyDomainIpv6StatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyLogRetrievalStatusRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, enabled=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.enabled = enabled # type: int
def validate(self):
pass
def to_map(self):
_map = super(ModifyLogRetrievalStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.enabled is not None:
result['Enabled'] = self.enabled
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Enabled') is not None:
self.enabled = m.get('Enabled')
return self
class ModifyLogRetrievalStatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyLogRetrievalStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyLogRetrievalStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyLogRetrievalStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyLogRetrievalStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyLogRetrievalStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyLogServiceStatusRequest(TeaModel):
def __init__(self, instance_id=None, domain=None, enabled=None):
self.instance_id = instance_id # type: str
self.domain = domain # type: str
self.enabled = enabled # type: int
def validate(self):
pass
def to_map(self):
_map = super(ModifyLogServiceStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.domain is not None:
result['Domain'] = self.domain
if self.enabled is not None:
result['Enabled'] = self.enabled
return result
def from_map(self, m=None):
m = m or dict()
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('Enabled') is not None:
self.enabled = m.get('Enabled')
return self
class ModifyLogServiceStatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyLogServiceStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyLogServiceStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyLogServiceStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyLogServiceStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyLogServiceStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyProtectionModuleModeRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, mode=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.mode = mode # type: int
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleModeRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.mode is not None:
result['Mode'] = self.mode
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('Mode') is not None:
self.mode = m.get('Mode')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class ModifyProtectionModuleModeResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleModeResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyProtectionModuleModeResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyProtectionModuleModeResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyProtectionModuleModeResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyProtectionModuleModeResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyProtectionModuleRuleRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule=None, rule_id=None, lock_version=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule = rule # type: str
self.rule_id = rule_id # type: long
self.lock_version = lock_version # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleRuleRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule is not None:
result['Rule'] = self.rule
if self.rule_id is not None:
result['RuleId'] = self.rule_id
if self.lock_version is not None:
result['LockVersion'] = self.lock_version
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('Rule') is not None:
self.rule = m.get('Rule')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
if m.get('LockVersion') is not None:
self.lock_version = m.get('LockVersion')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class ModifyProtectionModuleRuleResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleRuleResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyProtectionModuleRuleResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyProtectionModuleRuleResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyProtectionModuleRuleResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyProtectionModuleRuleResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyProtectionModuleStatusRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, module_status=None, instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.module_status = module_status # type: int
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.module_status is not None:
result['ModuleStatus'] = self.module_status
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('ModuleStatus') is not None:
self.module_status = m.get('ModuleStatus')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class ModifyProtectionModuleStatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionModuleStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyProtectionModuleStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyProtectionModuleStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyProtectionModuleStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyProtectionModuleStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyProtectionRuleCacheStatusRequest(TeaModel):
def __init__(self, domain=None, rule_id=None, defense_type=None, instance_id=None):
self.domain = domain # type: str
self.rule_id = rule_id # type: long
self.defense_type = defense_type # type: str
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionRuleCacheStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.rule_id is not None:
result['RuleId'] = self.rule_id
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class ModifyProtectionRuleCacheStatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionRuleCacheStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyProtectionRuleCacheStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyProtectionRuleCacheStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyProtectionRuleCacheStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyProtectionRuleCacheStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class ModifyProtectionRuleStatusRequest(TeaModel):
def __init__(self, domain=None, defense_type=None, rule_id=None, rule_status=None, lock_version=None,
instance_id=None):
self.domain = domain # type: str
self.defense_type = defense_type # type: str
self.rule_id = rule_id # type: long
self.rule_status = rule_status # type: int
self.lock_version = lock_version # type: long
self.instance_id = instance_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionRuleStatusRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domain is not None:
result['Domain'] = self.domain
if self.defense_type is not None:
result['DefenseType'] = self.defense_type
if self.rule_id is not None:
result['RuleId'] = self.rule_id
if self.rule_status is not None:
result['RuleStatus'] = self.rule_status
if self.lock_version is not None:
result['LockVersion'] = self.lock_version
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domain') is not None:
self.domain = m.get('Domain')
if m.get('DefenseType') is not None:
self.defense_type = m.get('DefenseType')
if m.get('RuleId') is not None:
self.rule_id = m.get('RuleId')
if m.get('RuleStatus') is not None:
self.rule_status = m.get('RuleStatus')
if m.get('LockVersion') is not None:
self.lock_version = m.get('LockVersion')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class ModifyProtectionRuleStatusResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(ModifyProtectionRuleStatusResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class ModifyProtectionRuleStatusResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: ModifyProtectionRuleStatusResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(ModifyProtectionRuleStatusResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = ModifyProtectionRuleStatusResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class SetDomainRuleGroupRequest(TeaModel):
def __init__(self, domains=None, rule_group_id=None, waf_version=None, instance_id=None, resource_group_id=None):
self.domains = domains # type: str
self.rule_group_id = rule_group_id # type: long
self.waf_version = waf_version # type: long
self.instance_id = instance_id # type: str
self.resource_group_id = resource_group_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(SetDomainRuleGroupRequest, self).to_map()
if _map is not None:
return _map
result = dict()
if self.domains is not None:
result['Domains'] = self.domains
if self.rule_group_id is not None:
result['RuleGroupId'] = self.rule_group_id
if self.waf_version is not None:
result['WafVersion'] = self.waf_version
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.resource_group_id is not None:
result['ResourceGroupId'] = self.resource_group_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('Domains') is not None:
self.domains = m.get('Domains')
if m.get('RuleGroupId') is not None:
self.rule_group_id = m.get('RuleGroupId')
if m.get('WafVersion') is not None:
self.waf_version = m.get('WafVersion')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('ResourceGroupId') is not None:
self.resource_group_id = m.get('ResourceGroupId')
return self
class SetDomainRuleGroupResponseBody(TeaModel):
def __init__(self, request_id=None):
self.request_id = request_id # type: str
def validate(self):
pass
def to_map(self):
_map = super(SetDomainRuleGroupResponseBody, self).to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m=None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class SetDomainRuleGroupResponse(TeaModel):
def __init__(self, headers=None, body=None):
self.headers = headers # type: dict[str, str]
self.body = body # type: SetDomainRuleGroupResponseBody
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super(SetDomainRuleGroupResponse, self).to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m=None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = SetDomainRuleGroupResponseBody()
self.body = temp_model.from_map(m['body'])
return self
# Python library import
import asyncio, asyncssh, logging
# Module logging logger
log = logging.getLogger(__package__)
# Debug level
# logging.basicConfig(level=logging.WARNING)
# logging.basicConfig(level=logging.INFO)
logging.basicConfig(level=logging.DEBUG)
asyncssh.set_debug_level(2)
# Declaration of constant values
# Max data to read in read function
MAX_BUFFER_DATA = 65535
# Dictionary mapping IPv4 dotted netmasks to prefix lengths
ipv4_netmask_list = {
"0.0.0.0": "0",
"128.0.0.0": "1",
"192.0.0.0": "2",
"224.0.0.0": "3",
"240.0.0.0": "4",
"248.0.0.0": "5",
"252.0.0.0": "6",
"254.0.0.0": "7",
"255.0.0.0": "8",
"255.128.0.0": "9",
"255.192.0.0": "10",
"255.224.0.0": "11",
"255.240.0.0": "12",
"255.248.0.0": "13",
"255.252.0.0": "14",
"255.254.0.0": "15",
"255.255.0.0": "16",
"255.255.128.0": "17",
"255.255.192.0": "18",
"255.255.224.0": "19",
"255.255.240.0": "20",
"255.255.248.0": "21",
"255.255.252.0": "22",
"255.255.254.0": "23",
"255.255.255.0": "24",
"255.255.255.128": "25",
"255.255.255.192": "26",
"255.255.255.224": "27",
"255.255.255.240": "28",
"255.255.255.248": "29",
"255.255.255.252": "30",
"255.255.255.254": "31",
"255.255.255.255": "32",
}
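# Illustrative helper (an assumption, not part of the original module): the
# table above maps a dotted-quad netmask to its CIDR prefix length, so the
# conversion is a single dictionary lookup. Returns None for unknown masks.
def netmask_to_prefix_length(netmask):
    """Return the prefix length (as a string) for an IPv4 dotted netmask."""
    # e.g. netmask_to_prefix_length("255.255.255.0") == "24"
    return ipv4_netmask_list.get(netmask)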
class NetworkDevice:
"""
Base class for network object
:param ip: IP address of a device
:type ip: str
:param username: Username used to connect to a device
:type username: str
:param password: Password used to connect to a device
:type password: str
:param device_type: Type of device used
:type device_type: str
:param port: TCP port used to connect a device. Default value is "22" for SSH
:type port: int, optional
:param timeout: Timeout in seconds used when connecting to a device. Default value is 10 seconds
:type timeout: int, optional
:param _protocol: Protocol used to connect a device. "ssh" or "telnet" are possible options. Default value is "ssh"
:type _protocol: str, optional
:param enable_mode: Enable mode for devices requiring it. Default value is "False"
:type enable_mode: bool, optional
:param enable_password: Enable password used for enable mode.
:type enable_password: str, optional
:param conn: Variable used for the management of the SSH connection
:type conn: SSHClientConnection object
:param _writer: Variable used for the management of the Telnet connection and writing channel
:type _writer: StreamWriter object
:param _reader: Variable used for the management of the Telnet reading channel
:type _reader: StreamReader object
:param possible_prompts: Used by the connect method to list all possible prompts of the device
:type possible_prompts: list
:param _connect_first_ending_prompt: Default possible ending prompts. Used only once, right after sending login and password, to discover the prompt
:type _connect_first_ending_prompt: list
:param list_of_possible_ending_prompts: Different strings at the end of a prompt the device can get. Used for detecting the prompt returned in sent commands
:type list_of_possible_ending_prompts: list
:param _telnet_connect_login: Login prompt for Telnet. Used to detect when a login is expected or when login and password access has failed
:type _telnet_connect_login: str
:param _telnet_connect_password: Password prompt for Telnet. Used to detect when a password is expected or when login and password access has failed
:type _telnet_connect_password: str
:param _telnet_connect_authentication_fail_prompt: Known failing messages or prompts when an authentication has failed. Used to get an answer faster than timeout events
:type _telnet_connect_authentication_fail_prompt: list
:param cmd_enable: Enable command for entering into enable mode
:type cmd_enable: str
:param cmd_disable_paging: Command used to disable paging on a device. That command is run at connection time
:type cmd_disable_paging: str
:param cmd_enter_config_mode: Command used to enter configuration mode on a device, when the device supports that feature.
:type cmd_enter_config_mode: str
:param cmd_exit_config_mode: Command used to leave configuration mode on a device, when the device supports that feature.
:type cmd_exit_config_mode: str
:param cmd_get_version: API command used to get the software version of a device
:type cmd_get_version: str
:param cmd_get_hostname: API command used to get the hostname of a device
:type cmd_get_hostname: str
:param cmd_get_model: API command used to get the model of a device
:type cmd_get_model: str
:param cmd_get_serial_number: API command used to get the serial number of a device
:type cmd_get_serial_number: str
:param cmd_get_config: API command used to get the running configuration of a device
:type cmd_get_config: str
:param cmd_save_config: API command used to save the running configuration on the device
:type cmd_save_config: str
"""
def __init__(self, **kwargs):
# Display info message
log.info("__init__")
self.ip = ""
self.username = ""
self.password = ""
self.device_type = ""
self.port = 22
self.timeout = 10
self._protocol = "ssh"
self.enable_mode = False
self.enable_password = ""
self.conn = None
self._writer = None
self._reader = None
self.possible_prompts = []
self._connect_first_ending_prompt = ["#", ">"]
self.list_of_possible_ending_prompts = [
"(config-line)#",
"(config-if)#",
"(config)#",
">",
"#",
]
self._carriage_return_for_send_command = "\n"
self._send_command_error_in_returned_output = []
self._telnet_connect_login = "Username:"
self._telnet_connect_password = "Password:"
self._telnet_connect_authentication_fail_prompt = [":", "%"]
# General commands
self.cmd_enable = "enable"
self.cmd_disable_paging = "terminal length 0"
self.cmd_enter_config_mode = "configure terminal"
self.cmd_exit_config_mode = "exit"
self.cmd_get_version = "show version"
self.cmd_get_hostname = "show version | include uptime"
self.cmd_get_model = "show inventory"
self.cmd_get_serial_number = "show inventory | i SN"
self.cmd_get_config = "show running-config"
self.cmd_save_config = "write memory"
# Layer 1 commands
self.cmd_get_interfaces = [
"interface ethernet print terse without-paging",
"foreach i in=([/interface ethernet find]) do={/interface ethernet monitor $i once without-paging}",
"interface bridge port print terse without-paging",
]
self.cmd_set_interface = [
"interface ethernet enable <INTERFACE>",
"interface ethernet disable <INTERFACE>",
'interface ethernet comment <INTERFACE> "<COMMENT>"',
"interface ethernet set l2mtu=<MAXIMUMFRAMESIZE> <INTERFACE>",
"interface bridge port set frame-types=<MODE> ingress-filtering=<FILTERINGVLAN> [find interface=<INTERFACE>]",
]
# Layer 2 commands
self.cmd_get_mac_address_table = "interface bridge host print without-paging"
self.cmd_get_arp = "ip arp print terse without-paging"
self.cmd_get_lldp_neighbors = "ip neighbor print terse without-paging"
self.cmd_get_vlans = "interface bridge vlan print terse without-paging"
self.cmd_add_vlan = 'interface bridge vlan add vlan-ids=<VLAN> comment="<VLAN_NAME>" bridge=<BRIDGE>'
self.cmd_remove_vlan = "interface bridge vlan remove [find vlan-ids=<VLAN>]"
self.cmd_add_interface_to_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
self.cmd_remove_interface_from_vlan = [
"interface bridge vlan print terse",
"interface bridge vlan set [find vlan-ids=<VLAN>] untagged=<INTERFACE>",
"interface bridge vlan set [find vlan-ids=<VLAN>] tagged=<INTERFACE>",
"interface bridge port set [find interface=<INTERFACE>] pvid=<VLAN>",
]
# Layer 3 commands
self.cmd_get_routing_table = "ip route print without-paging terse"
self.cmd_get_interfaces_ip = "ip address print terse without-paging"
self.cmd_add_static_route = "ip route add dst-address=<NETWORK>/<PREFIXLENGTH> gateway=<DESTINATION> distance=<METRIC>"
self.cmd_remove_static_route = (
"ip route remove [find dst-address=<NETWORK>/<PREFIXLENGTH>]"
)
# Display info message
log.debug("__init__: kwargs: " + str(kwargs))
# Get information from dictionary
# "ip" found?
if "ip" in kwargs:
# Save "ip" parameter
self.ip = kwargs["ip"]
# Display info message
log.info("__init__: ip found: " + str(self.ip))
# "username" found?
if "username" in kwargs:
self.username = kwargs["username"]
# Display info message
log.info("__init__: username found: " + str(self.username))
# "password" found?
if "password" in kwargs:
self.password = kwargs["password"]
# Display info message
log.debug("__init__: password found: " + str(self.password))
# "device_type" found?
if "device_type" in kwargs:
self.device_type = kwargs["device_type"]
# Display info message
log.info("__init__: device_type found: " + str(self.device_type))
# "timeout" found?
if "timeout" in kwargs:
self.timeout = kwargs["timeout"]
# Display info message
log.info("__init__: timeout found: " + str(self.timeout))
# "protocol" found?
if "protocol" in kwargs:
self._protocol = kwargs["protocol"].lower()
# Display info message
log.info("__init__: protocol found: " + str(self._protocol))
# By default telnet port is 23
if self._protocol.lower() == "telnet":
self.port = 23
# "port" found?
if "port" in kwargs:
self.port = kwargs["port"]
# Display info message
log.info("__init__: port found: " + str(self.port))
# "enable_mode" found?
if "enable_mode" in kwargs:
self.enable_mode = kwargs["enable_mode"]
# Display info message
log.info("__init__: enable_mode found: " + str(self.enable_mode))
# "enable_password" found?
if "enable_password" in kwargs:
self.enable_password = kwargs["enable_password"]
# Display info message
log.info("__init__: enable_password found: " + str(self.enable_password))
async def __aenter__(self):
"""
Context manager opening connection
"""
try:
# Run an async method to connect a device
await self.connect()
except Exception:
# Disconnection (if needed) in case the connection is done but something failed
await self.disconnect()
# propagate exception if needed
raise
return self
async def __aexit__(self, exc_type, exc_value, traceback):
"""
Context manager closing connection
"""
# Close the connection
await self.disconnect()
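# Illustrative usage of the async context manager above (commented out;
# the connection parameters are assumptions for demonstration only):
#
#     async def example():
#         async with NetworkDevice(ip="192.0.2.1", username="admin",
#                                  password="secret") as device:
#             return await device.send_command(device.cmd_get_version)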
def find_prompt(self, text):
"""
Method used to find a prompt inside an output string
This method is used during the first communication with the device.
First it finds the prompt, then calculates the different forms the prompt
can take. This is useful later on when finding the prompt in other
output streams (read).
:param text: data with a prompt
:type text: str
:return: the prompt found
:rtype: str
"""
# Get last line of the data
prompt = text.split("\n")[-1]
# Remove a possible carriage return left in the prompt
prompt = prompt.split("\r")[-1]
# Display info message
log.info(f"find_prompt: prompt: '{prompt}'")
# Get the possible prompts for future recognition
self.possible_prompts = self.get_possible_prompts(prompt)
# Return the prompt
return prompt
def get_possible_prompts(self, prompt):
"""
Method used to check if a prompt has one of the expected endings then
create a list with all possible prompts for the device
:param prompt: a prompt with a possible ending prompt (eg. "switch#")
:type prompt: str
:return: the list of prompts
:rtype: list
"""
# By default no prompts are returned
list_of_prompts = []
# Get all the possible ending values of the prompt
list_of_possible_ending_prompts = self.list_of_possible_ending_prompts
# Temporary variable storing the prompt value
my_prompt = prompt
# Test each possible prompt ending (i.e '#', '>', "(config-if)#", "(config)#")
for ending in list_of_possible_ending_prompts:
# Is this current prompt ending at the end of the prompt?
if my_prompt.endswith(ending):
# Yes
# Then remove the ending
my_prompt = my_prompt[: -len(ending)]
# Break the loop
break
# The prompt has now been reduced, e.g. from "switch#" to "switch"
# Display info message
log.info(f"get_possible_prompts: prompt found: '{my_prompt}'")
# Display info message
log.info(f"get_possible_prompts: prompt found size: '{len(my_prompt)}'")
# Now create all the possible prompts for that device
for ending in list_of_possible_ending_prompts:
# Save the prompt name with a possible ending in the list
list_of_prompts.append(my_prompt + ending)
# Display info message
log.info(f"get_possible_prompts: list of possible prompts: {list_of_prompts}")
# Return the list of prompts
return list_of_prompts
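# Illustrative example of the expansion above (prompt value is an
# assumption): for a prompt "switch#", the known ending "#" is stripped
# and the method returns:
#     ["switch(config-line)#", "switch(config-if)#", "switch(config)#",
#      "switch>", "switch#"]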
def check_if_prompt_is_found(self, text):
"""
Method used to check if a prompt is detected inside a string
:param text: a string that may contain a prompt
:type text: str
:return: True if one of the possible prompts is found
:rtype: bool
"""
# By default the prompt is not found
prompt_found = False
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"check_if_prompt_is_found: prompt: '{prompt}'")
# Is this prompt present in the text?
if prompt in text:
# Yes
prompt_found = True
# Display info message
log.info(f"check_if_prompt_is_found: prompt found: '{prompt}'")
# Leave the for loop
break
# Return whether a prompt was found
return prompt_found
def remove_command_in_output(self, text, cmd):
"""
Method removing the command at the beginning of a string
After sending a command, an "echo" of the command sent
is displayed in the output string. This method removes it.
:param text: the text with the command at the beginning
:type text: str
:param cmd: the command previously sent
:type cmd: str
:return: the output string without the command
:rtype: str
"""
# Display info message
log.info(f"remove_command_in_output: cmd = '{cmd}'")
# Display info message
log.info(f"remove_command_in_output: cmd (hex) = '{cmd.encode().hex()}'")
# Remove the command from the beginning of the output
# output = text.lstrip(cmd + "\n")
output = text.split(cmd + "\n")[-1]
# Display info message
log.info(f"remove_command_in_output: output = '{output}'")
# Return the string without the command
return output
def remove_starting_carriage_return_in_output(self, text):
"""
Method removing the carriage return at the beginning of a string
:param text: the text with a carriage return at the beginning
:type text: str
:return: the output string without the starting carriage return
:rtype: str
"""
# Display info message
log.info("remove_starting_carriage_return_in_output")
# Remove the carriage return at the beginning of the string
output = text.lstrip("\r\n\r")
# Display info message
log.info(f"remove_starting_carriage_return_in_output: output = '{output}'")
# Return the string without the starting carriage return
return output
def remove_ending_prompt_in_output(self, text):
"""
Method removing the prompt at the end of a string
:param text: the text with a prompt at the end
:type text: str
:return: the output string without the ending prompt
:rtype: str
"""
# Display info message
log.info("remove_ending_prompt_in_output")
# Check all possible prompts
for prompt in self.possible_prompts:
# Display info message
log.info(f"remove_ending_prompt_in_output: prompt: '{prompt}'")
# Prompt found in the text?
if prompt in text:
# Yes
# Then it is removed from the text
# text = text.rstrip(prompt)
text = text[: -len(prompt)]
# Remove also carriage return
text = text.rstrip("\r\n")
# Leave the loop
break
# output = text.rstrip("\r\n" + self.prompt)
# Display info message
log.info(f"remove_ending_prompt_in_output: text without prompt:\n'{text}'")
# Return the text without prompt at the end
return text
def check_error_output(self, output):
"""
Check if an error is returned by the device ("% Unrecognized command", "% Ambiguous command", etc.)
If an error is found, then an exception is raised
"""
# Display info message
log.info("check_error_output")
# Check if output has some data
if output:
# Yes
# Display info message
log.info("check_error_output: output has some data")
# Check all elements in the list of output
for element in self._send_command_error_in_returned_output:
# Display info message
log.info(f"check_error_output: element: {element}")
# Display info message
log.info(f"check_error_output: output[0]: {output[0]}")
# Check if the output starts with a string with an error message (like "% Invalid input detected at '^' marker.")
# Error message?
if output.startswith(element):
# Yes
# Raise an exception
raise Exception(output)
def remove_ansi_escape_sequence(self, text):
"""
Method removing ANSI escape sequences from a string
Only CSI sequences are removed
:param text: the text that may contain ANSI escape sequences
:type text: str
:return: the output string without CSI escape sequences
:rtype: str
"""
# By default no string returned
output = ""
# By default no escape sequence found
esc_found = 0
# Read char by char a string
for i in text:
# Display char
# log.info(f"{str(i).encode('ascii')}")
# No escape previously found?
if esc_found == 0:
# No escape sequence currently found
# Escape?
if i == "\x1b":
# Yes
log.info("Esc!")
# Escape found
esc_found = 1
else:
# No
# Then the current char can be saved
output += i
# Escape previously found?
elif esc_found == 1:
# Yes
# Then check if this is a CSI sequence
if i == "[":
# Beginning of CSI sequence
log.info("CSI sequence")
# CSI sequence
esc_found = 2
else:
# Another Escape sequence
# Keep the escape sequence in the string
output += "\x1b" + i
# No escape sequence next
esc_found = 0
else:
# Char between 'a' and 'z' or 'A' and 'Z'?
if (i >= "a" and i <= "z") or (i >= "A" and i <= "Z"):
# Yes
# Then it is the end of CSI escape sequence
log.info("End of escape sequence")
# No escape sequence next
esc_found = 0
# Return a string without ANSI escape sequence
return output
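# Illustrative example of the CSI stripping above (the sample string is an
# assumption): a colored prompt such as "\x1b[32mswitch#\x1b[0m" becomes
# "switch#". Non-CSI escape sequences (e.g. "\x1bE") are kept verbatim.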
async def disable_paging(self):
"""
Async method disabling paging on a device
Use the "cmd_disable_paging" attribute
"""
# Display info message
log.info("disable_paging")
# Send command to the device to disable paging
await self.send_command(self.cmd_disable_paging)
async def connect(self):
"""
Async method used for connecting a device
Currently supported: SSH and Telnet
"""
# Display info message
log.info("connect")
try:
# SSH?
if self._protocol == "ssh":
# Yes
# Then Connect using SSH
await self.connectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then Connect using Telnet
await self.connectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"connect: unsupported protocol: {self._protocol}")
except Exception:
# There was a problem with a connection method
# Display info message
log.info("connect: connection error")
raise
async def connectSSH(self):
"""
Async method used for connecting a device using SSH protocol
"""
# Display info message
log.info("connectSSH")
# Parameters of the connection
generator = asyncssh.connect(
self.ip,
username=self.username,
password=self.password,
known_hosts=None,
# encryption_algs="*", # Parameter that includes all encryption algorithms (even the old ones disabled by default)
encryption_algs=[
algs.decode("utf-8") for algs in asyncssh.encryption._enc_algs
], # Parameter that includes all encryption algorithms (even the old ones disabled by default)
)
# Trying to connect to the device
try:
self.conn = await asyncio.wait_for(generator, timeout=self.timeout)
except asyncio.exceptions.TimeoutError as error:
# Timeout
# Display error message
log.error(f"connectSSH: connection failed: {self.ip} timeout: '{error}'")
# Exception propagation
raise asyncio.exceptions.TimeoutError(
"Connection failed: connection timed out."
)
except Exception as error:
# Connection failed
# Display error message
log.error(f"connectSSH: connection failed: {self.ip} '{error}'")
# Exception propagation
raise
# Display info message
log.info("connectSSH: connection success")
# Create a session
self.stdinx, self.stdoutx, _ = await self.conn.open_session(term_type="netscud")
# Display info message
log.info("connectSSH: open_session success")
# By default no data has been read
data = ""
# By default no prompt found
prompt_not_found = True
try:
# Read data
while prompt_not_found:
# Display info message
log.info("connectSSH: beginning of the loop")
# Read the prompt
data += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=self.timeout
)
# Display info message
log.info(f"connectSSH: data: '{str(data)}'")
# Display info message
log.info(f"connectSSH: data: hex:'{data.encode('utf-8').hex()}'")
# Check if an initial prompt is found
for prompt in self._connect_first_ending_prompt:
# Ending prompt found?
if data.endswith(prompt):
# Yes
# Display info message
log.info(f"connectSSH: first ending prompt found: '{prompt}'")
# A ending prompt has been found
prompt_not_found = False
# Leave the loop
break
# Display info message
log.info("connectSSH: end of loop")
except Exception as error:
# Fail while reading the prompt
# Display error message
log.error(
f"connectSSH: timeout while reading the prompt: {self.ip} '{error}'"
)
# Exception propagation
raise
# Display info message
log.info(f"connectSSH: end of prompt loop")
# Remove possible escape sequence
data = self.remove_ansi_escape_sequence(data)
# Find prompt
self.prompt = self.find_prompt(str(data))
# Display info message
log.info(f"connectSSH: prompt found: '{self.prompt}'")
# Display info message
log.info(f"connectSSH: prompt found size: '{len(self.prompt)}'")
# Disable paging command available?
if self.cmd_disable_paging:
# Yes
# Disable paging
await self.disable_paging()
async def connectTelnet(self):
"""
Async method used for connecting a device using Telnet protocol
"""
# Display info message
log.info("connectTelnet")
try:
# Prepare connection with Telnet
conn = asyncio.open_connection(self.ip, self.port)
except Exception as error:
# Preparation to the connection failed
# Display error message
log.error(f"connectTelnet: preparation to the connection failed: '{error}'")
# Exception propagation
raise
# Display info message
log.info("connectTelnet: preparation to the connection success")
try:
# Connection with Telnet
self._reader, self._writer = await asyncio.wait_for(
conn, timeout=self.timeout
)
except asyncio.TimeoutError:
# Time out during connection
# Display error message
log.error("connectTelnet: connection: timeout")
# Exception propagation
raise
# Display info message
log.info("connectTelnet: connection success")
# Get prompt for the login
prompt = self._telnet_connect_login
# Get prompt for the password
prompt_password = self._telnet_connect_password
# By default a login is expected
use_login = True
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# Read the telnet information and first prompt (for login but a password prompt can be found for IOS for instance)
while True:
# Display info message
log.info(f"connectTelnet: read data for prompt")
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=self.timeout
)
# Display info message
log.info(f"connectTelnet: byte_data: {byte_data}")
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"connectTelnet: output: {output}")
# Prompt for the username found?
if prompt in output:
# Yes
# Leave the loop
break
# Prompt for the password found?
elif prompt_password in output:
# Yes
# That means only password is required
use_login = False
# Leave the loop
break
# Display info message
log.info(f"connectTelnet: login prompt: '{output}'")
# Login to use?
if use_login:
# Yes
# Display info message
log.info("connectTelnet: sending login")
try:
# Send login
await self.send_command(self.username, prompt_password)
# Display info message
log.info("connectTelnet: login sent")
except Exception:
# Problem with the login
# Propagate the exception
raise
# Display info message
log.info("connectTelnet: sending password")
try:
# Send password
output = await self.telnet_send_command_with_unexpected_pattern(
self.password,
self._connect_first_ending_prompt,
self._telnet_connect_authentication_fail_prompt,
)
except Exception:
# Problem with the password
# Propagate the exception
raise
# Display info message
log.info("connectTelnet: password sent")
# Find prompt
self.prompt = self.find_prompt(str(output))
# Display info message
log.info(f"connectTelnet: prompt found: '{self.prompt}'")
# Password enable?
if self.enable_mode:
# Yes
# Display info message
log.info("connectTelnet: enable mode to be activated")
try:
# Send enable command
await self.send_command(self.cmd_enable, prompt_password)
# Display info message
log.info("connectTelnet: enable command sent")
# Display info message
log.info("connectTelnet: sending enable password")
# Send enable password
await self.telnet_send_command_with_unexpected_pattern(
self.enable_password,
self._connect_first_ending_prompt,
self._telnet_connect_authentication_fail_prompt,
)
# Display info message
log.info("connectTelnet: enable password sent")
except Exception:
# Problem with the enable password
# Display info message
log.info("connectTelnet: enable password failure")
# Propagate the exception
raise
# Disable paging command available?
if self.cmd_disable_paging:
# Yes
# Disable paging
await self.disable_paging()
async def disconnect(self):
"""
Async method used to disconnect a device
If this method is not used then exceptions will happen
when the program ends
"""
# Debug info message
log.info("disconnect")
# SSH?
if self._protocol == "ssh":
# Yes
# Then disconnect using SSH
await self.disconnectSSH()
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then disconnect using Telnet
await self.disconnectTelnet()
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"Unsupported protocol: {self._protocol}")
async def disconnectSSH(self):
"""
Async method used to disconnect a device in SSH
If this method is not used, exceptions may be raised
when the program ends
"""
# Debug info message
log.info("disconnectSSH")
# Connection previously open in SSH?
if self.conn:
# Yes
# Then close the SSH connection
self.conn.close()
# No more connection to disconnect
self.conn = None
async def disconnectTelnet(self):
"""
Async method used to disconnect a device in Telnet
If this method is not used, exceptions may be raised
when the program ends
"""
# Debug info message
log.info("disconnectTelnet")
# Connection previously open in Telnet?
if self._writer:
# Yes
# Then close the Telnet connection
self._writer.close()
# No more connection to disconnect
self._writer = None
async def send_command(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_command")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# SSH?
if self._protocol == "ssh":
# Yes
# Then send the command using SSH
output = await self.send_commandSSH(cmd, pattern=pattern, timeout=timeout)
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then send the command using Telnet
output = await self.send_commandTelnet(
cmd, pattern=pattern, timeout=timeout
)
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"send_command: unsupported protocol: {self._protocol}")
# Return the result of the command
return output
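# Usage sketch (hypothetical `device` instance of this class): wait for a
# custom pattern instead of the prompt, with a longer timeout:
#
#   output = await device.send_command("copy run start", pattern="[OK]", timeout=60)
#
# When pattern is None, the loops in send_commandSSH/send_commandTelnet fall
# back to prompt detection via check_if_prompt_is_found().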
async def send_commandSSH(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_commandSSH")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
# cmd = cmd + "\n"
# cmd = cmd + "\r\n"
# Debug info message
log.info(f"send_commandSSH: cmd = '{cmd}'")
# Sending command
self.stdinx.write(cmd + self._carriage_return_for_send_command)
# Display message
log.info("send_commandSSH: command sent")
# Variable used to gather data
output = ""
# Reading data
while True:
# await asyncio.sleep(1)
# Read the data received
output += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
)
# Debug info message
# log.info(f"send_commandSSH: output hex: '{str(output).encode("utf-8").hex()}'")
# Remove ANSI escape sequence
output = self.remove_ansi_escape_sequence(output)
# Remove possible "\r"
output = output.replace("\r", "")
# data = ""
# for i in output:
# data += i.encode("utf-8").hex()
# print(data)
# Debug info message
log.info(f"send_commandSSH: output: '{output}'")
# Is a pattern used?
if pattern:
# Use pattern instead of prompt
if pattern in output:
# Yes
# Leave the loop
break
else:
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Debug info message
log.debug(
f"send_commandSSH: raw output: '{output}'\nsend_commandSSH: raw output (hex): '{output.encode().hex()}'"
)
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Debug info message
log.debug(
f"send_commandSSH: cleaned output: '{output}'\nsend_commandSSH: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the command
return output
async def send_commandTelnet(self, cmd, pattern=None, timeout=None):
"""
Async method used to send data to a device
:param cmd: command to send
:type cmd: str
:param pattern: optional, a pattern replacing the prompt when the prompt is not expected
:type pattern: str
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: float
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("send_commandTelnet")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + "\n"
# Sending command
self._writer.write(cmd.encode())
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(f"send_commandTelnet: byte_data: '{byte_data}'")
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_commandTelnet: output: '{output}'")
# Is a pattern used?
if pattern:
# Use pattern instead of prompt
if pattern in output:
# Yes
# Leave the loop
break
else:
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
# Timeout while reading the prompt
# Display error message
log.error("send_commandTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error while reading the prompt
# Display error message
log.error(f"send_commandTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_commandTelnet: raw output: '{output}'\nsend_commandTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Debug info message
log.debug(
f"send_commandTelnet: cleaned output: '{output}'\nsend_commandTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the command
return output
async def telnet_send_command_with_unexpected_pattern(
self, cmd, pattern, error_pattern=None, timeout=None
):
"""
Async method used to send command for Telnet connection to a device with possible unexpected patterns
send_command can wait until timeout if the login and password are wrong. This method
speeds up the returned error message when an authentication failure is identified.
This method is limited to authentication when a password is required
:param cmd: command to send
:type cmd: str
:param pattern: a list of patterns located at the very end of the returned string. Can be used
to define a custom or unexpected prompt at the end of a string
:type pattern: list
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: float
:param error_pattern: optional, a list of failed prompts found when the login and password are not correct
:type error_pattern: list
:return: the output of command
:rtype: str
"""
# Debug info message
log.info("telnet_send_command_with_unexpected_pattern")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Sending command
self._writer.write(cmd.encode())
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# By default pattern is not found
pattern_not_found = True
try:
# Read data
while pattern_not_found:
# Read returned prompt
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: byte_data: '{byte_data}'"
)
# Display debug message
log.debug(
f"telnet_send_command_with_unexpected_pattern: byte_data: hex: '{byte_data.hex()}'"
)
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: output: '{output}'"
)
# Is a pattern used?
if pattern:
# Check all pattern of prompt in the output
for prompt in pattern:
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: checking prompt: '{prompt}'"
)
# A pattern found?
if prompt in output:
# Yes
# A pattern is found. The main loop can be stopped
pattern_not_found = False
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: prompt found: '{prompt}'"
)
# Leave the loop
break
# Is an unexpected pattern used?
if error_pattern and pattern_not_found:
# Check all unexpected pattern of prompt in the output
for bad_prompt in error_pattern:
# Display info message
log.info(
f"telnet_send_command_with_unexpected_pattern: checking unexpected prompt: '{bad_prompt}'"
)
# An error_pattern pattern found?
if bad_prompt in output:
# Yes
# Display error message
log.error(
"telnet_send_command_with_unexpected_pattern: authentication failed"
)
# Raise exception
raise Exception(
"telnet_send_command_with_unexpected_pattern: authentication failed"
)
# No break needed: the exception above already exits the loop
except asyncio.TimeoutError:
# Timeout while reading the prompt
# Close the connection in order to not display RuntimeError
await self.disconnect()
# Display error message
log.error(
"telnet_send_command_with_unexpected_pattern: reading prompt: timeout"
)
# Exception propagation
raise
except Exception as error:
# Error while reading the prompt
# Close the connection in order to not display RuntimeError
await self.disconnect()
# Display error message
log.error(
f"telnet_send_command_with_unexpected_pattern: reading prompt: error: {error}"
)
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"telnet_send_command_with_unexpected_pattern: raw output: '{output}'\ntelnet_send_command_with_unexpected_pattern: raw output (hex): '{output.encode().hex()}'"
)
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Debug info message
log.debug(
f"telnet_send_command_with_unexpected_pattern: cleaned output: '{output}'\ntelnet_send_command_with_unexpected_pattern: cleaned output (hex): '{output.encode().hex()}'"
)
# Return the result of the command
return output
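# Illustrative call (taken from connectTelnet above): during authentication a
# failed-login banner should abort immediately instead of waiting for the
# read timeout:
#
#   output = await self.telnet_send_command_with_unexpected_pattern(
#       self.password,
#       self._connect_first_ending_prompt,
#       self._telnet_connect_authentication_fail_prompt,
#   )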
async def send_config_set(self, cmds=None, timeout=None):
"""
Async method used to send command in config mode
The commands sent can be either a string or a list of strings. There are
3 steps:
- Entering configuration mode
- Sending the commands
- Leaving configuration mode
:param cmds: the commands to send to the device
:type cmds: str or list
:param timeout: optional, a timeout for the commands sent. Default value is self.timeout
:type timeout: float
:return: the results of the commands sent
:rtype: str
"""
# Display info message
log.info("send_config_set")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# SSH?
if self._protocol == "ssh":
# Yes
# Then send the commands using SSH
output = await self.send_config_setSSH(cmds, timeout)
# Telnet?
elif self._protocol == "telnet":
# Yes
# Then send the commands using Telnet
output = await self.send_config_setTelnet(cmds, timeout)
else:
# Unsupported protocol
# Raise an exception
raise Exception(f"send_config_set: unsupported protocol: {self._protocol}")
# Return the result of the commands
return output
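# Usage sketch (hypothetical `device` instance): cmds may be a single string
# or a list; both forms go through the same enter/send/exit sequence:
#
#   await device.send_config_set("hostname SW1")
#   await device.send_config_set(["interface Gi0/1", "description uplink"])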
async def send_config_setSSH(self, cmds=None, timeout=None):
"""
Async method used to send command in config mode
The commands sent can be either a string or a list of strings. There are
3 steps:
- Entering configuration mode
- Sending the commands
- Leaving configuration mode
:param cmds: the commands to send to the device
:type cmds: str or list
:param timeout: optional, a timeout for the commands sent. Default value is self.timeout
:type timeout: float
:return: the results of the commands sent
:rtype: str
"""
# Display info message
log.info("send_config_setSSH")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Clear returned output
returned_output = ""
# Check if cmds is a string
if isinstance(cmds, str):
# A string
# Convert the string into a list
cmds = [cmds]
# A list?
elif not isinstance(cmds, list):
# Not a list (and not a string)
# Display error message
log.error(
"send_config_setSSH: parameter cmds used in send_config_set is neither a string nor a list"
)
# Leave the method
return returned_output
##############################
# Entering configuration mode
##############################
# Display info message
log.info("send_config_set: entering configuration mode")
# Clear output
output = ""
# Get the command for entering config mode
cmd = self.cmd_enter_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setSSH: cmd = '{cmd}'")
# Sending command
self.stdinx.write(cmd)
# Display message
log.info("send_config_setSSH: configuration mode entered")
while True:
# Read the data received
output += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(f"send_config_setSSH: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Debug info message
log.debug(
f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Sending commands
##############################
# Display info message
log.info("send_config_setSSH: sending commands")
# Clear output
output = ""
# Each command
for cmd in cmds:
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setSSH: cmd = '{cmd}'")
# Sending command
self.stdinx.write(cmd)
# Display info message
log.info("send_config_setSSH: command sent")
while True:
# Read the data received
output += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(f"send_config_setSSH: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Debug info message
log.debug(
f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Leaving configuration mode
##############################
# Display info message
log.info("send_config_setSSH: leaving configuration mode")
# Clear output
output = ""
# Get the command to leave config mode
cmd = self.cmd_exit_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setSSH: cmd = '{cmd}'")
# Sending command
self.stdinx.write(cmd)
# Display info message
log.info("send_config_setSSH: command to leave configuration mode sent")
while True:
# Read the data received
output += await asyncio.wait_for(
self.stdoutx.read(MAX_BUFFER_DATA), timeout=timeout
)
# Display info message
log.info(f"send_config_setSSH: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Debug info message
log.debug(
f"send_config_setSSH: raw output: '{output}'\nsend_config_setSSH: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setSSH: cleaned output: '{output}'\nsend_config_setSSH: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the commands
return returned_output
async def send_config_setTelnet(self, cmds=None, timeout=None):
"""
Async method used to send command in config mode
The commands sent can be either a string or a list of strings. There are
3 steps:
- Entering configuration mode
- Sending the commands
- Leaving configuration mode
:param cmds: the commands to send to the device
:type cmds: str or list
:param timeout: optional, a timeout for the commands sent. Default value is self.timeout
:type timeout: float
:return: the results of the commands sent
:rtype: str
"""
# Display info message
log.info("send_config_setTelnet")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Clear returned output
returned_output = ""
# Check if cmds is a string
if isinstance(cmds, str):
# A string
# Convert the string into a list
cmds = [cmds]
# A list?
elif not isinstance(cmds, list):
# Not a list (and not a string)
# Display error message
log.error(
"send_config_setTelnet: parameter cmds used in send_config_set is neither a string or a list"
)
# Leave the method
return returned_output
##############################
# Entering configuration mode
##############################
# Display info message
log.info("send_config_setTelnet: entering configuration mode")
# Clear output
output = ""
# Get the command for entering config mode
cmd = self.cmd_enter_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display message
log.info("send_config_setTelnet: configuration mode entered")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
# Timeout while reading the prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error while reading the prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Sending commands
##############################
# Display info message
log.info("send_config_setTelnet: sending commands")
# Clear output
output = ""
# Each command
for cmd in cmds:
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display info message
log.info("send_config_setTelnet: command sent")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
try:
# Read data
while True:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
except asyncio.TimeoutError:
# Timeout while reading the prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error while reading the prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
##############################
# Leaving configuration mode
##############################
# Display info message
log.info("send_config_setTelnet: leaving configuration mode")
# Clear output
output = ""
# Get the command to leave config mode
cmd = self.cmd_exit_config_mode
# Add carriage return at the end of the command (mandatory to send the command)
cmd = cmd + self._carriage_return_for_send_command
# Display info message
log.info(f"send_config_setTelnet: cmd = '{cmd}'")
# Sending command
self._writer.write(cmd.encode())
# Display info message
log.info("send_config_setTelnet: command to leave configuration mode sent")
# Temporary string variable
output = ""
# Temporary bytes variable
byte_data = b""
# Protection against infinite loop
loop = 3
try:
# Read data
while loop:
# Read the data received
byte_data += await asyncio.wait_for(
self._reader.read(MAX_BUFFER_DATA), timeout=timeout
)
# Temporary conversion to string. This string has the following form: "b'....'"
output = str(byte_data)
# Display info message
log.info(f"send_config_setTelnet: output: '{output}'")
await asyncio.sleep(0.5)
# Check if prompt is found
if self.check_if_prompt_is_found(output):
# Yes
# Leave the loop
break
# Protection for "exit" command infinite loop in Cisco when enable is not activated
loop -= 1
except asyncio.TimeoutError:
# Timeout while reading the prompt
# Display error message
log.error("send_config_setTelnet: connection: timeout")
# Exception propagation
raise
except Exception as error:
# Error while reading the prompt
# Display error message
log.error(f"send_config_setTelnet: error: {error}")
# Exception propagation
raise
# Convert data (bytes) into string
output = byte_data.decode("utf-8", "ignore")
# Debug info message
log.debug(
f"send_config_setTelnet: raw output: '{output}'\nsend_config_setTelnet: raw output (hex): '{output.encode().hex()}'"
)
# Add the output to the returned output
returned_output += output
# Remove the command sent from the result of the command
output = self.remove_command_in_output(output, str(cmd))
# Remove the carriage return of the output
output = self.remove_starting_carriage_return_in_output(output)
# Remove the ending prompt of the output
output = self.remove_ending_prompt_in_output(output)
# Display debug message
log.debug(
f"send_config_setTelnet: cleaned output: '{output}'\nsend_config_setTelnet: cleaned output (hex): '{output.encode().hex()}'"
)
# Check if there is an error in the output string (like "% Unrecognized command")
# and generate an exception if needed
self.check_error_output(output)
# Return the result of the commands
return returned_output
#########################################################
#
# List of API
#
#########################################################
async def get_version(self):
"""
Async method used to get the version of the software of the device
:return: Version of the software of the device
:rtype: str
"""
# Display info message
log.info("get_version")
# By default empty string
version = ""
# Run get version on the device
output = await self.send_command(self.cmd_get_version)
# Seek "Version " and "," to get the version in the returned output
version = output.split("Version ")[1].split(",")[0]
# Display info message
log.info(f"get_version: version: {version}")
# Return the version of the software of the device
return version
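# Worked example of the parsing above (sample banner is illustrative, not
# from a real device): given output containing
#   "Cisco IOS Software, ... Version 15.2(4)E7, RELEASE SOFTWARE (fc2)"
# output.split("Version ")[1] yields "15.2(4)E7, RELEASE SOFTWARE (fc2)"
# and .split(",")[0] reduces it to "15.2(4)E7".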
async def get_hostname(self):
"""
Async method used to get the name of the device
:return: Name of the device
:rtype: str
"""
# Display info message
log.info("get_hostname")
# Get hostname
output = await self.send_command(self.cmd_get_hostname)
# Display info message
log.info(f"get_hostname: output: '{output}'")
# Remove the useless information in the returned string
output = output.split()[0]
# Display info message
log.info(f"get_hostname: hostname found: '{output}'")
# Return the name of the device
return output
async def get_model(self):
"""
Async method used to get the model of the device
:return: Model of the device
:rtype: str
"""
# Display info message
log.info("get_model")
# Get model
output = await self.send_command(self.cmd_get_model)
# Display info message
log.info(f"get_model: output: '{output}'")
# Remove the useless information in the returned string
output = output.split('"')[3]
# Display info message
log.info(f"get_model: model found: '{output}'")
# Return the model of the device
return output
async def get_serial_number(self):
"""
Get serial number of the switch or the serial number of the first switch of a stack
:return: Serial number of the device
:rtype: str
"""
# Display info message
log.info("get_serial_number")
# Get serial number
output = await self.send_command(self.cmd_get_serial_number)
# Display info message
log.info(f"get_serial_number: output: '{output}'")
# Remove the useless information in the returned string
output = output.splitlines()[0].split()[-1]
# Display info message
log.info(f"get_hostname: hostname found: '{output}'")
# Return the serial number of the device
return output
async def get_config(self, timeout=None):
"""
Async method used to get the configuration of the device
:param timeout: optional, a timeout for the command sent. Default value is self.timeout
:type timeout: float
:return: Configuration of the device
:rtype: str
"""
# Display info message
log.info("get_config")
# Default value of timeout variable
if timeout is None:
timeout = self.timeout
# Get config
output = await self.send_command(self.cmd_get_config, timeout=timeout)
# Return the configuration of the device
return output
async def save_config(self):
"""
Async method used to save the current configuration on the device
:return: Commands of the configuration saving process
:rtype: str
"""
# Display info message
log.info("save_config")
# Send command
output = await self.send_command(self.cmd_save_config)
# Return the commands of the configuration saving process
return output
import hashlib
import json
import logging
import os
import urllib.parse
import uuid
from datetime import datetime
import pytz
from celery import states as celery_states
from django.conf import settings
from django.contrib.auth.base_user import AbstractBaseUser
from django.contrib.auth.base_user import BaseUserManager
from django.contrib.auth.models import PermissionsMixin
from django.contrib.sessions.models import Session
from django.core.cache import cache
from django.core.exceptions import MultipleObjectsReturned
from django.core.exceptions import ObjectDoesNotExist
from django.core.exceptions import PermissionDenied
from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from django.core.files.storage import FileSystemStorage
from django.core.mail import send_mail
from django.core.validators import MaxValueValidator
from django.core.validators import MinValueValidator
from django.db import IntegrityError
from django.db import models
from django.db.models import Count
from django.db.models import Exists
from django.db.models import F
from django.db.models import Index
from django.db.models import JSONField
from django.db.models import Max
from django.db.models import OuterRef
from django.db.models import Q
from django.db.models import Subquery
from django.db.models import Sum
from django.db.models import UUIDField as DjangoUUIDField
from django.db.models import Value
from django.db.models.expressions import ExpressionList
from django.db.models.expressions import RawSQL
from django.db.models.functions import Lower
from django.db.models.indexes import IndexExpression
from django.db.models.query_utils import DeferredAttribute
from django.db.models.sql import Query
from django.dispatch import receiver
from django.utils import timezone
from django.utils.translation import gettext as _
from django_celery_results.models import TaskResult
from django_cte import With
from le_utils import proquint
from le_utils.constants import content_kinds
from le_utils.constants import exercises
from le_utils.constants import file_formats
from le_utils.constants import format_presets
from le_utils.constants import languages
from le_utils.constants import roles
from model_utils import FieldTracker
from mptt.models import MPTTModel
from mptt.models import raise_if_unsaved
from mptt.models import TreeForeignKey
from postmark.core import PMMailInactiveRecipientException
from postmark.core import PMMailUnauthorizedException
from rest_framework.authtoken.models import Token
from rest_framework.fields import get_attribute
from rest_framework.utils.encoders import JSONEncoder
from contentcuration.constants import channel_history
from contentcuration.constants import completion_criteria
from contentcuration.constants import user_history
from contentcuration.constants.contentnode import kind_activity_map
from contentcuration.db.models.expressions import Array
from contentcuration.db.models.functions import ArrayRemove
from contentcuration.db.models.functions import Unnest
from contentcuration.db.models.manager import CustomContentNodeTreeManager
from contentcuration.db.models.manager import CustomManager
from contentcuration.statistics import record_channel_stats
from contentcuration.utils.cache import delete_public_channel_cache_keys
from contentcuration.utils.parser import load_json_string
from contentcuration.viewsets.sync.constants import ALL_CHANGES
from contentcuration.viewsets.sync.constants import ALL_TABLES
EDIT_ACCESS = "edit"
VIEW_ACCESS = "view"
DEFAULT_CONTENT_DEFAULTS = {
'license': None,
'language': None,
'author': None,
'aggregator': None,
'provider': None,
'copyright_holder': None,
'license_description': None,
'mastery_model': exercises.NUM_CORRECT_IN_A_ROW_5,
'm_value': 5,
'n_value': 5,
'auto_derive_video_thumbnail': True,
'auto_derive_audio_thumbnail': True,
'auto_derive_document_thumbnail': True,
'auto_derive_html5_thumbnail': True,
'auto_derive_exercise_thumbnail': True,
'auto_randomize_questions': True,
}
DEFAULT_USER_PREFERENCES = json.dumps(DEFAULT_CONTENT_DEFAULTS, ensure_ascii=False)
def to_pk(model_or_pk):
if isinstance(model_or_pk, models.Model):
return model_or_pk.pk
return model_or_pk
class UserManager(BaseUserManager):
def create_user(self, email, first_name, last_name, password=None):
if not email:
raise ValueError('Email address not specified')
new_user = self.model(
email=self.normalize_email(email),
)
new_user.set_password(password)
new_user.first_name = first_name
new_user.last_name = last_name
new_user.save(using=self._db)
return new_user
def create_superuser(self, email, first_name, last_name, password=None):
new_user = self.create_user(email, first_name, last_name, password=password)
new_user.is_admin = True
new_user.save(using=self._db)
return new_user
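# Usage sketch for the manager above (illustrative values):
#
#   user = User.objects.create_user("[email protected]", "Jane", "Doe", password="s3cret")
#   admin = User.objects.create_superuser("[email protected]", "Ada", "Root", password="s3cret")
#
# create_superuser reuses create_user and then sets is_admin; note that
# is_active still defaults to False on the User model below.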
class UniqueActiveUserIndex(Index):
def create_sql(self, model, schema_editor, using='', **kwargs):
"""
This is a vendored and modified version of the Django create_sql method
We do this so that we can monkey patch in the unique index statement onto the schema_editor
while we create the statement for this index, and then revert it to normal.
We should remove this as soon as Django natively supports UniqueConstraints with Expressions.
This should hopefully be the case in Django 3.3.
"""
include = [model._meta.get_field(field_name).column for field_name in self.include]
condition = self._get_condition_sql(model, schema_editor)
if self.expressions:
index_expressions = []
for expression in self.expressions:
index_expression = IndexExpression(expression)
index_expression.set_wrapper_classes(schema_editor.connection)
index_expressions.append(index_expression)
expressions = ExpressionList(*index_expressions).resolve_expression(
Query(model, alias_cols=False),
)
fields = None
col_suffixes = None
else:
fields = [
model._meta.get_field(field_name)
for field_name, _ in self.fields_orders
]
col_suffixes = [order[1] for order in self.fields_orders]
expressions = None
sql = "CREATE UNIQUE INDEX %(name)s ON %(table)s (%(columns)s)%(include)s%(condition)s"
# Store the normal SQL statement for indexes
old_create_index_sql = schema_editor.sql_create_index
# Replace it with our own unique index so that this index actually adds a constraint
schema_editor.sql_create_index = sql
# Generate the SQL statement that we want to return
return_statement = schema_editor._create_index_sql(
model, fields=fields, name=self.name, using=using,
db_tablespace=self.db_tablespace, col_suffixes=col_suffixes,
opclasses=self.opclasses, condition=condition, include=include,
expressions=expressions, **kwargs,
)
# Reinstate the previous index SQL statement so that we have done no harm
schema_editor.sql_create_index = old_create_index_sql
# Return our SQL statement
return return_statement
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(max_length=100, unique=True)
first_name = models.CharField(max_length=100)
last_name = models.CharField(max_length=100)
is_admin = models.BooleanField(default=False)
is_active = models.BooleanField('active', default=False,
help_text='Designates whether this user should be treated as active.')
is_staff = models.BooleanField('staff status', default=False,
help_text='Designates whether the user can log into this admin site.')
date_joined = models.DateTimeField('date joined', default=timezone.now)
clipboard_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='user_clipboard', on_delete=models.SET_NULL)
preferences = models.TextField(default=DEFAULT_USER_PREFERENCES)
disk_space = models.FloatField(default=524288000, help_text='How many bytes a user can upload')
disk_space_used = models.FloatField(default=0, help_text='How many bytes a user has uploaded')
information = JSONField(null=True)
content_defaults = JSONField(default=dict)
policies = JSONField(default=dict, null=True)
feature_flags = JSONField(default=dict, null=True)
deleted = models.BooleanField(default=False, db_index=True)
_field_updates = FieldTracker(fields=[
# Field to watch for changes
"disk_space",
])
objects = UserManager()
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['first_name', 'last_name']
def __str__(self):
return self.email
def delete(self):
"""
Soft deletes the user account.
"""
self.deleted = True
# Deactivate the user to disallow authentication and also
# to let the user verify the email again after recovery.
self.is_active = False
self.save()
self.history.create(user_id=self.pk, action=user_history.DELETION)
def recover(self):
"""
Use this method when we want to recover a user.
"""
self.deleted = False
self.save()
self.history.create(user_id=self.pk, action=user_history.RECOVERY)
def hard_delete_user_related_data(self):
"""
Hard deletes all user-related data but keeps the user record itself intact.
User related data that gets hard deleted are:
- sole editor non-public channels.
- sole editor non-public channelsets.
- sole editor non-public channels' content nodes and its underlying files that are not
used by any other channel.
- all user invitations.
"""
from contentcuration.viewsets.common import SQCount
# Hard delete invitations associated to this account.
self.sent_to.all().delete()
self.sent_by.all().delete()
editable_channels_user_query = (
User.objects.filter(editable_channels__id=OuterRef('id'))
.values_list('id', flat=True)
.distinct()
)
non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount(
editable_channels_user_query, field="id")).filter(num_editors=1, public=False)
# Point sole editor non-public channels' contentnodes to orphan tree to let
# our garbage collection delete the nodes and underlying files.
ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list(
non_public_channels_sole_editor.values_list("id", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID)
# Hard delete non-public channels associated with this user (if user is the only editor).
non_public_channels_sole_editor.delete()
# Hard delete non-public channel collections associated with this user (if user is the only editor).
user_query = (
User.objects.filter(channel_sets__id=OuterRef('id'))
.values_list('id', flat=True)
.distinct()
)
self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1, public=False).delete()
# Create history!
self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION)
def can_edit(self, channel_id):
return Channel.filter_edit_queryset(Channel.objects.all(), self).filter(pk=channel_id).exists()
def check_space(self, size, checksum):
if self.is_admin:
return True
active_files = self.get_user_active_files()
if active_files.filter(checksum=checksum).exists():
return True
space = self.get_available_space(active_files=active_files)
if space < size:
raise PermissionDenied(_("Not enough space. Check your storage under Settings page."))
def check_channel_space(self, channel):
active_files = self.get_user_active_files()
staging_tree_id = channel.staging_tree.tree_id
channel_files = self.files\
.filter(contentnode__tree_id=staging_tree_id)\
.values('checksum')\
.distinct()\
.exclude(checksum__in=active_files.values_list('checksum', flat=True))
staged_size = float(channel_files.aggregate(used=Sum('file_size'))['used'] or 0)
if self.get_available_space(active_files=active_files) < (staged_size):
raise PermissionDenied(_('Out of storage! Request more space under Settings > Storage.'))
def check_staged_space(self, size, checksum):
if self.staged_files.filter(checksum=checksum).exists():
return True
space = self.get_available_staged_space()
if space < size:
raise PermissionDenied(_('Out of storage! Request more space under Settings > Storage.'))
def get_available_staged_space(self):
space_used = self.staged_files.values('checksum').distinct().aggregate(size=Sum("file_size"))['size'] or 0
return float(max(self.disk_space - space_used, 0))
def get_available_space(self, active_files=None):
return float(max(self.disk_space - self.get_space_used(active_files=active_files), 0))
def get_user_active_trees(self):
return self.editable_channels.exclude(deleted=True)\
.values(tree_id=F("main_tree__tree_id"))
def get_user_active_files(self):
cte = With(self.get_user_active_trees().distinct())
return cte.join(self.files.get_queryset(), contentnode__tree_id=cte.col.tree_id)\
.with_cte(cte)\
.values('checksum')\
.distinct()
def get_space_used(self, active_files=None):
active_files = active_files or self.get_user_active_files()
files = active_files.aggregate(total_used=Sum('file_size'))
return float(files['total_used'] or 0)
def set_space_used(self):
self.disk_space_used = self.get_space_used()
self.save()
return self.disk_space_used
def get_space_used_by_kind(self):
active_files = self.get_user_active_files()
files = active_files.values('preset__kind_id')\
.annotate(space=Sum('file_size'))\
.order_by()
kind_dict = {}
for item in files:
kind_dict[item['preset__kind_id']] = item['space']
return kind_dict
def email_user(self, subject, message, from_email=None, **kwargs):
try:
# msg = EmailMultiAlternatives(subject, message, from_email, [self.email])
# msg.attach_alternative(kwargs["html_message"],"text/html")
# msg.send()
send_mail(subject, message, from_email, [self.email], **kwargs)
except (PMMailInactiveRecipientException, PMMailUnauthorizedException) as e:
logging.error(str(e))
def clean(self):
super(User, self).clean()
self.email = self.__class__.objects.normalize_email(self.email)
def get_full_name(self):
"""
Returns the first_name plus the last_name, with a space in between.
"""
full_name = '%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_short_name(self):
"""
Returns the short name for the user.
"""
return self.first_name
def get_token(self):
token, _ = Token.objects.get_or_create(user=self)
return token.key
def save(self, *args, **kwargs):
from contentcuration.utils.user import calculate_user_storage
super(User, self).save(*args, **kwargs)
if 'disk_space' in self._field_updates.changed():
calculate_user_storage(self.pk)
changed = False
if not self.content_defaults:
self.content_defaults = DEFAULT_CONTENT_DEFAULTS
changed = True
if not self.clipboard_tree:
self.clipboard_tree = ContentNode.objects.create(title=self.email + " clipboard", kind_id=content_kinds.TOPIC)
self.clipboard_tree.save()
changed = True
if changed:
self.save()
class Meta:
verbose_name = "User"
verbose_name_plural = "Users"
indexes = [
UniqueActiveUserIndex(Lower('email'), condition=Q(is_active=True), name="contentcura_email_d4d492_idx")
]
@classmethod
def filter_view_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
# all shared editors
all_editable = User.editable_channels.through.objects.all()
editable = all_editable.filter(
channel_id__in=all_editable.filter(user_id=user.pk).values_list("channel_id", flat=True)
)
# all shared viewers
all_view_only = User.view_only_channels.through.objects.all()
view_only = all_view_only.filter(
channel_id__in=all_view_only.filter(user_id=user.pk).values_list("channel_id", flat=True)
)
return queryset.filter(
Q(pk=user.pk)
| Q(pk__in=editable.values_list("user_id", flat=True))
| Q(pk__in=view_only.values_list("user_id", flat=True))
)
@classmethod
def filter_edit_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
return queryset.filter(pk=user.pk)
@classmethod
def get_for_email(cls, email, deleted=False, **filters):
"""
Returns the appropriate User record given an email, ordered by:
- those with is_active=True first, of which there should only ever be one
- otherwise by ID DESC, so the most recent inactive record should be returned
Filters out deleted User records by default. To include both deleted and
undeleted user records pass None to the deleted argument.
:param email: A string of the user's email
:param filters: Additional filters to filter the User queryset
:return: User or None
"""
user_qs = User.objects.filter(email__iexact=email.strip())
if deleted is not None:
user_qs = user_qs.filter(deleted=deleted)
return user_qs.filter(**filters).order_by("-is_active", "-id").first()
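# Usage sketch for get_for_email (illustrative email):
#
#   user = User.get_for_email("[email protected]")                  # undeleted records only
#   any_user = User.get_for_email("[email protected]", deleted=None)  # include soft-deleted
#
# Matching is case-insensitive, and an active account is preferred over the
# most recently created inactive one.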
class UUIDField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 32
super(UUIDField, self).__init__(*args, **kwargs)
def prepare_value(self, value):
if isinstance(value, uuid.UUID):
return value.hex
return value
def get_default(self):
result = super(UUIDField, self).get_default()
if isinstance(result, uuid.UUID):
result = result.hex
return result
def to_python(self, value):
if isinstance(value, uuid.UUID):
return value.hex
return value
class MPTTTreeIDManager(models.Model):
"""
Because MPTT uses plain integers for tree IDs and does not use an auto-incrementing field for them,
the same ID can sometimes be assigned to two trees if two channel create ops happen concurrently.
We resolve this by creating a dummy table and using its ID as the tree index, to take advantage of the db's
concurrency-friendly way of generating sequential integer IDs. As we are using this table only for
ID generation, it does not need any fields. There is a custom migration that ensures
that the number of records (and thus id) matches the max tree ID number when this table gets added.
"""
def file_on_disk_name(instance, filename):
"""
Create a namespaced file path from the File object's checksum property.
This path will be used to store the content copy
:param instance: File (content File model)
:param filename: str
:return: str
"""
return generate_file_on_disk_name(instance.checksum, filename)
def generate_file_on_disk_name(checksum, filename):
""" Separated from file_on_disk_name to allow for simple way to check if has already exists """
h = checksum
basename, ext = os.path.splitext(filename)
directory = os.path.join(settings.STORAGE_ROOT, h[0], h[1])
if not os.path.exists(directory):
os.makedirs(directory)
return os.path.join(directory, h + ext.lower())
def object_storage_name(instance, filename):
"""
Create a namespaced file path from the File object's checksum property.
This path will be used to store the content copy
:param instance: File (content File model)
:param filename: str
:return: str
"""
default_ext = ''
if instance.file_format_id:
default_ext = '.{}'.format(instance.file_format_id)
return generate_object_storage_name(instance.checksum, filename, default_ext)
def generate_object_storage_name(checksum, filename, default_ext=''):
""" Separated from file_on_disk_name to allow for simple way to check if has already exists """
h = checksum
basename, actual_ext = os.path.splitext(filename)
ext = actual_ext if actual_ext else default_ext
# Use / instead of os.path.join as Windows makes this \\
directory = "/".join([settings.STORAGE_ROOT, h[0], h[1]])
return os.path.join(directory, h + ext.lower())
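# Worked example for the two name generators above (hypothetical checksum):
# with checksum "abcd1234" and filename "video.mp4", both functions produce
#   {STORAGE_ROOT}/a/b/abcd1234.mp4
# i.e. files are sharded by the first two characters of their checksum.
# generate_file_on_disk_name additionally creates the directory, and
# generate_object_storage_name falls back to default_ext when the filename
# has no extension.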
def generate_storage_url(filename, request=None, *args):
"""
Generate a storage URL for the given content filename.
"""
path = generate_object_storage_name(os.path.splitext(filename)[0], filename)
# There are a few scenarios where Studio might be run:
#
# 1. In normal kubernetes, nginx will proxy for us. We'll know we're in kubernetes when the
# environment variable RUN_MODE=k8s
#
# 2. In Docker Compose and bare metal runserver, we'll be running in runserver, and minio
# will be exposed in port 9000 in the host's localhost network.
# Note (aron): returning the true storage URL (e.g. https://storage.googleapis.com/storage/a.mp4)
# isn't too important, because we have CDN in front of our servers, so it should be cached.
# But change the logic here in case there is a potential for bandwidth and latency improvement.
# Detect our current state first
run_mode = os.getenv("RUN_MODE")
# if we're running inside k8s, then just serve the normal /content/{storage,databases} URL,
# and let nginx handle proper proxying.
if run_mode == "k8s":
url = "/content/{path}".format(
path=path,
)
# if we're in docker-compose or in baremetal, just return the object storage URL as localhost:9000
elif run_mode == "docker-compose" or run_mode is None:
# generate the minio storage URL, so we can get the GET parameters that give everyone
# access even if they don't need to log in
params = urllib.parse.urlparse(default_storage.url(path)).query
host = "localhost"
port = 9000 # hardcoded to the default minio port
url = "http://{host}:{port}/{bucket}/{path}?{params}".format(
host=host,
port=port,
bucket=settings.AWS_S3_BUCKET_NAME,
path=path,
params=params,
)
return url
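# Illustrative results of generate_storage_url (values assumed):
#   RUN_MODE=k8s                    -> "/content/<storage path>"
#   RUN_MODE=docker-compose / unset -> "http://localhost:9000/<bucket>/<storage path>?<minio params>"
# Any other RUN_MODE value appears to leave `url` unbound, matching the two
# branches in the code above.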
class FileOnDiskStorage(FileSystemStorage):
"""
Overrides FileSystemStorage's default save method to ignore duplicated files.
"""
def get_available_name(self, name):
return name
def _save(self, name, content):
if self.exists(name):
# if the file exists, do not call the superclasses _save method
logging.warning('Content copy "%s" already exists!' % name)
return name
return super(FileOnDiskStorage, self)._save(name, content)
class SecretToken(models.Model):
"""Tokens for channels"""
token = models.CharField(max_length=100, unique=True)
is_primary = models.BooleanField(default=False)
@classmethod
def exists(cls, token):
"""
Return True if the given token string already exists,
False otherwise.
"""
return cls.objects.filter(token=token).exists()
@classmethod
def generate_new_token(cls):
"""
Creates a primary secret token for the current channel using a proquint
string. Creates a secondary token containing the channel id.
These tokens can be used to refer to the channel to download its content
database.
"""
# Try 100 times to generate a unique token.
TRIALS = 100
for __ in range(TRIALS):
token = proquint.generate()
if SecretToken.exists(token):
continue
break
# If after TRIALS attempts we still don't have a unique token,
# raise an error.
# See https://stackoverflow.com/a/9980160 on what a for-else loop does.
else:
raise ValueError("Cannot generate new token")
# We found a unique token! Save it
return token
def __str__(self):
return "{}-{}".format(self.token[:5], self.token[5:])
def get_channel_thumbnail(channel):
if not isinstance(channel, dict):
channel = channel.__dict__
if channel.get("thumbnail_encoding"):
thumbnail_data = channel.get("thumbnail_encoding")
if thumbnail_data.get("base64"):
return thumbnail_data["base64"]
if channel.get("thumbnail") and 'static' not in channel.get("thumbnail"):
return generate_storage_url(channel.get("thumbnail"))
return '/static/img/kolibri_placeholder.png'
CHANNEL_NAME_INDEX_NAME = "channel_name_idx"
# A list of all the FKs from Channel object
# to ContentNode trees
# used for permissions filtering
CHANNEL_TREES = (
"main_tree",
"chef_tree",
"trash_tree",
"staging_tree",
"previous_tree",
)
def boolean_val(val):
return Value(val, output_field=models.BooleanField())
class PermissionCTE(With):
tree_id_fields = [
"channel__{}__tree_id".format(tree_name)
for tree_name in CHANNEL_TREES
]
def __init__(self, model, user_id, **kwargs):
queryset = model.objects.filter(user_id=user_id)\
.annotate(
tree_id=Unnest(ArrayRemove(Array(*self.tree_id_fields), None), output_field=models.IntegerField())
)
super(PermissionCTE, self).__init__(queryset=queryset.values("user_id", "channel_id", "tree_id"), **kwargs)
@classmethod
def editable_channels(cls, user_id):
return PermissionCTE(User.editable_channels.through, user_id, name="editable_channels_cte")
@classmethod
def view_only_channels(cls, user_id):
return PermissionCTE(User.view_only_channels.through, user_id, name="view_only_channels_cte")
def exists(self, *filters):
return Exists(self.queryset().filter(*filters).values("user_id"))
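# Usage sketch (pattern assumed from the helpers above and from
# User.get_user_active_files): annotate a node queryset with a permission CTE:
#
#   cte = PermissionCTE.editable_channels(user.id)
#   nodes = cte.join(ContentNode.objects.all(), tree_id=cte.col.tree_id).with_cte(cte)
#
# django-cte's With.join()/with_cte() are used the same way earlier in this
# module.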
class Channel(models.Model):
""" Permissions come from association with organizations """
id = UUIDField(primary_key=True, default=uuid.uuid4)
name = models.CharField(max_length=200, blank=True)
description = models.CharField(max_length=400, blank=True)
tagline = models.CharField(max_length=150, blank=True, null=True)
version = models.IntegerField(default=0)
thumbnail = models.TextField(blank=True, null=True)
thumbnail_encoding = JSONField(default=dict)
editors = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='editable_channels',
verbose_name="editors",
help_text="Users with edit rights",
blank=True,
)
viewers = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='view_only_channels',
verbose_name="viewers",
help_text="Users with view only rights",
blank=True,
)
language = models.ForeignKey('Language', null=True, blank=True, related_name='channel_language', on_delete=models.SET_NULL)
trash_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_trash', on_delete=models.SET_NULL)
clipboard_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_clipboard', on_delete=models.SET_NULL)
main_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_main', on_delete=models.SET_NULL)
staging_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_staging', on_delete=models.SET_NULL)
chef_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_chef', on_delete=models.SET_NULL)
previous_tree = models.ForeignKey('ContentNode', null=True, blank=True, related_name='channel_previous', on_delete=models.SET_NULL)
bookmarked_by = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='bookmarked_channels',
verbose_name="bookmarked by",
)
deleted = models.BooleanField(default=False, db_index=True)
public = models.BooleanField(default=False, db_index=True)
preferences = models.TextField(default=DEFAULT_USER_PREFERENCES)
content_defaults = JSONField(default=dict)
priority = models.IntegerField(default=0, help_text="Order to display public channels")
last_published = models.DateTimeField(blank=True, null=True)
secret_tokens = models.ManyToManyField(
SecretToken,
related_name='channels',
verbose_name="secret tokens",
blank=True,
)
source_url = models.CharField(max_length=200, blank=True, null=True)
demo_server_url = models.CharField(max_length=200, blank=True, null=True)
# Fields specific to content generated by Ricecooker
source_id = models.CharField(max_length=200, blank=True, null=True)
source_domain = models.CharField(max_length=300, blank=True, null=True)
ricecooker_version = models.CharField(max_length=100, blank=True, null=True)
# Fields to calculate when channel is published
published_data = JSONField(default=dict)
icon_encoding = models.TextField(blank=True, null=True)
total_resource_count = models.IntegerField(default=0)
published_kind_count = models.TextField(blank=True, null=True)
published_size = models.FloatField(default=0)
included_languages = models.ManyToManyField(
"Language",
related_name='channels',
verbose_name="languages",
blank=True,
)
_field_updates = FieldTracker(fields=[
# Field to watch for changes
"description",
"language_id",
"thumbnail",
"name",
"thumbnail_encoding",
# watch these fields for changes
# but exclude them from setting changed
# on the main tree
"deleted",
"public",
"main_tree_id",
"version",
])
@classmethod
def get_editable(cls, user, channel_id):
return cls.filter_edit_queryset(cls.objects.all(), user).get(id=channel_id)
@classmethod
def filter_edit_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
# anonymous users have no editable channels, so return an empty queryset
if not user_id:
return queryset.none()
edit = Exists(User.editable_channels.through.objects.filter(user_id=user_id, channel_id=OuterRef("id")))
queryset = queryset.annotate(edit=edit)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
user_email = not user.is_anonymous and user.email
if user_id:
filters = dict(user_id=user_id, channel_id=OuterRef("id"))
edit = Exists(User.editable_channels.through.objects.filter(**filters).values("user_id"))
view = Exists(User.view_only_channels.through.objects.filter(**filters).values("user_id"))
else:
edit = boolean_val(False)
view = boolean_val(False)
queryset = queryset.annotate(
edit=edit,
view=view,
)
if user_id and user.is_admin:
return queryset
permission_filter = Q()
if user_id:
pending_channels = Invitation.objects.filter(email=user_email, revoked=False, declined=False, accepted=False).values_list(
"channel_id", flat=True
)
permission_filter = (
Q(view=True) | Q(edit=True) | Q(deleted=False, id__in=pending_channels)
)
return queryset.filter(permission_filter | Q(deleted=False, public=True))
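# Usage sketch (illustrative; `request.user` is assumed to come from an
# authenticated view): for a non-admin user this yields channels they edit or
# view, channels with a pending invitation to their email, and any public,
# non-deleted channel.
#
#     visible = Channel.filter_view_queryset(Channel.objects.all(), request.user)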
@classmethod
def get_all_channels(cls):
return cls.objects.select_related('main_tree').prefetch_related('editors', 'viewers').distinct()
def resource_size_key(self):
return "{}_resource_size".format(self.pk)
# Might be good to display resource size, but need to improve query time first
def get_resource_size(self):
cached_data = cache.get(self.resource_size_key())
if cached_data:
return cached_data
tree_id = self.main_tree.tree_id
files = File.objects.select_related('contentnode', 'assessment_item')\
.filter(contentnode__tree_id=tree_id)\
.values('checksum', 'file_size')\
.distinct()\
.aggregate(resource_size=Sum('file_size'))
cache.set(self.resource_size_key(), files['resource_size'] or 0, None)
return files['resource_size'] or 0
def on_create(self):
record_channel_stats(self, None)
if not self.content_defaults:
self.content_defaults = DEFAULT_CONTENT_DEFAULTS
if not self.main_tree:
self.main_tree = ContentNode.objects.create(
title=self.name,
kind_id=content_kinds.TOPIC,
content_id=self.id,
node_id=self.id,
original_channel_id=self.id,
source_channel_id=self.id,
changed=True,
complete=True,
)
# Ensure that locust or unit tests raise if there are any concurrency issues with tree ids.
if settings.DEBUG:
if ContentNode.objects.filter(parent=None, tree_id=self.main_tree.tree_id).count() != 1:
raise AssertionError
if not self.trash_tree:
self.trash_tree = ContentNode.objects.create(
title=self.name,
kind_id=content_kinds.TOPIC,
content_id=self.id,
node_id=self.id,
)
# if this change affects the published channel list, clear the channel cache
if self.public and (self.main_tree and self.main_tree.published):
delete_public_channel_cache_keys()
def on_update(self):
from contentcuration.utils.user import calculate_user_storage
original_values = self._field_updates.changed()
record_channel_stats(self, original_values)
blacklist = set([
"public",
"main_tree_id",
"version",
])
if self.main_tree and original_values and any((True for field in original_values if field not in blacklist)):
# Changing channel metadata should also mark main_tree as changed
self.main_tree.changed = True
# Check if original thumbnail is no longer referenced
if "thumbnail" in original_values and original_values["thumbnail"] and 'static' not in original_values["thumbnail"]:
filename, ext = os.path.splitext(original_values["thumbnail"])
delete_empty_file_reference(filename, ext[1:])
# Refresh storage for all editors on the channel
if "deleted" in original_values:
for editor in self.editors.all():
calculate_user_storage(editor.pk)
# Delete db if channel has been deleted and mark as unpublished
if "deleted" in original_values and not original_values["deleted"]:
self.pending_editors.all().delete()
export_db_storage_path = os.path.join(settings.DB_ROOT, "{channel_id}.sqlite3".format(channel_id=self.id))
if default_storage.exists(export_db_storage_path):
default_storage.delete(export_db_storage_path)
if self.main_tree:
self.main_tree.published = False
if self.main_tree and self.main_tree._field_updates.changed():
self.main_tree.save()
# if this change affects the published channel list, clear the channel cache
if "public" in original_values and (self.main_tree and self.main_tree.published):
delete_public_channel_cache_keys()
def save(self, *args, **kwargs):
if self._state.adding:
self.on_create()
else:
self.on_update()
super(Channel, self).save(*args, **kwargs)
def get_thumbnail(self):
return get_channel_thumbnail(self)
def has_changes(self):
return self.main_tree.get_descendants(include_self=True).filter(changed=True).exists()
def get_date_modified(self):
return self.main_tree.get_descendants(include_self=True).aggregate(last_modified=Max('modified'))['last_modified']
def get_resource_count(self):
return self.main_tree.get_descendants().exclude(kind_id=content_kinds.TOPIC).order_by('content_id').distinct('content_id').count()
def get_human_token(self):
return self.secret_tokens.get(is_primary=True)
def get_channel_id_token(self):
return self.secret_tokens.get(token=self.id)
def make_token(self):
token = self.secret_tokens.create(token=SecretToken.generate_new_token(), is_primary=True)
self.secret_tokens.get_or_create(token=self.id)
return token
def make_public(self, bypass_signals=False):
"""
Sets the current channel object to be public and viewable by anyone.
If bypass_signals is True, update the model in such a way that we
prevent any model signals from running due to the update.
Returns the same channel object.
"""
if bypass_signals:
self.public = True # set this attribute still, so the object will be updated
Channel.objects.filter(id=self.id).update(public=True)
# clear the channel cache
delete_public_channel_cache_keys()
else:
self.public = True
self.save()
return self
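# Usage sketch (hypothetical `channel` instance):
#
#     channel.make_public(bypass_signals=True)  # single UPDATE, no model signals
#     channel.make_public()                     # regular save(), signals fire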
def mark_created(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.CREATION)
def mark_publishing(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.PUBLICATION)
self.main_tree.publishing = True
self.main_tree.save()
def mark_deleted(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.DELETION)
self.deleted = True
self.save()
def mark_recovered(self, user):
self.history.create(actor_id=to_pk(user), action=channel_history.RECOVERY)
self.deleted = False
self.save()
@property
def deletion_history(self):
return self.history.filter(action=channel_history.DELETION)
@property
def publishing_history(self):
return self.history.filter(action=channel_history.PUBLICATION)
@classmethod
def get_public_channels(cls, defer_nonmain_trees=False):
"""
Get all public channels.
If defer_nonmain_trees is True, defer the loading of all
trees except for the main_tree."""
if defer_nonmain_trees:
c = (Channel.objects
.filter(public=True)
.exclude(deleted=True)
.select_related('main_tree')
.prefetch_related('editors')
.defer('trash_tree', 'clipboard_tree', 'staging_tree', 'chef_tree', 'previous_tree', 'viewers'))
else:
c = Channel.objects.filter(public=True).exclude(deleted=True)
return c
class Meta:
verbose_name = "Channel"
verbose_name_plural = "Channels"
indexes = [
models.Index(fields=["name"], name=CHANNEL_NAME_INDEX_NAME),
]
index_together = [
["deleted", "public"]
]
CHANNEL_HISTORY_CHANNEL_INDEX_NAME = "idx_channel_history_channel_id"
class ChannelHistory(models.Model):
"""
Model for tracking certain actions performed on a channel
"""
channel = models.ForeignKey('Channel', null=False, blank=False, related_name='history', on_delete=models.CASCADE)
actor = models.ForeignKey('User', null=False, blank=False, related_name='channel_history', on_delete=models.CASCADE)
performed = models.DateTimeField(default=timezone.now)
action = models.CharField(max_length=50, choices=channel_history.choices)
@classmethod
def prune(cls):
"""
Prunes history records, keeping only the most recent action of each type
for each channel and deleting all older ones
"""
keep_ids = cls.objects.distinct("channel_id", "action").order_by("channel_id", "action", "-performed").values_list("id", flat=True)
cls.objects.exclude(id__in=keep_ids).delete()
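# Sketch of prune() semantics (assumed data, for illustration): if a channel
# has accumulated several PUBLICATION and DELETION records, only the most
# recent row per (channel, action) pair survives:
#
#     ChannelHistory.prune()  # older duplicate action rows are deleted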
class Meta:
verbose_name = "Channel history"
verbose_name_plural = "Channel histories"
indexes = [
models.Index(fields=["channel_id"], name=CHANNEL_HISTORY_CHANNEL_INDEX_NAME),
]
class UserHistory(models.Model):
"""
Model that stores the user's action history.
"""
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=False, blank=False, related_name="history", on_delete=models.CASCADE)
action = models.CharField(max_length=32, choices=user_history.choices)
performed_at = models.DateTimeField(default=timezone.now)
class ChannelSet(models.Model):
# NOTE: this is referred to as "channel collections" on the front-end, but we need to call it
# something else as there is already a ChannelCollection model on the front-end
id = UUIDField(primary_key=True, default=uuid.uuid4)
name = models.CharField(max_length=200, blank=True)
description = models.CharField(max_length=400, blank=True)
public = models.BooleanField(default=False, db_index=True)
editors = models.ManyToManyField(
settings.AUTH_USER_MODEL,
related_name='channel_sets',
verbose_name="editors",
help_text="Users with edit rights",
blank=True,
)
secret_token = models.ForeignKey('SecretToken', null=True, blank=True, related_name='channel_sets', on_delete=models.SET_NULL)
@classmethod
def filter_edit_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
user_id = not user.is_anonymous and user.id
edit = Exists(User.channel_sets.through.objects.filter(user_id=user_id, channelset_id=OuterRef("id")))
queryset = queryset.annotate(edit=edit)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
return cls.filter_edit_queryset(queryset, user)
def get_channels(self):
if self.secret_token:
return self.secret_token.channels.filter(deleted=False)
def save(self, *args, **kwargs):
if self._state.adding:
self.on_create()
super(ChannelSet, self).save()
def on_create(self):
if not self.secret_token:
self.secret_token = SecretToken.objects.create(token=SecretToken.generate_new_token())
def delete(self, *args, **kwargs):
super(ChannelSet, self).delete(*args, **kwargs)
if self.secret_token:
self.secret_token.delete()
class ContentTag(models.Model):
id = UUIDField(primary_key=True, default=uuid.uuid4)
tag_name = models.CharField(max_length=50)
channel = models.ForeignKey('Channel', related_name='tags', blank=True, null=True, db_index=True, on_delete=models.SET_NULL)
objects = CustomManager()
def __str__(self):
return self.tag_name
class Meta:
unique_together = ['tag_name', 'channel']
class License(models.Model):
"""
Normalize the license of ContentNode model
"""
license_name = models.CharField(max_length=50)
license_url = models.URLField(blank=True)
license_description = models.TextField(blank=True)
copyright_holder_required = models.BooleanField(default=True)
is_custom = models.BooleanField(default=False)
exists = models.BooleanField(
default=False,
verbose_name="license exists",
help_text="Tells whether or not a content item is licensed to share",
)
@classmethod
def validate_name(cls, name):
if cls.objects.filter(license_name=name).count() == 0:
raise ValidationError('License `{}` does not exist'.format(name))
def __str__(self):
return self.license_name
NODE_ID_INDEX_NAME = "node_id_idx"
NODE_MODIFIED_INDEX_NAME = "node_modified_idx"
NODE_MODIFIED_DESC_INDEX_NAME = "node_modified_desc_idx"
CONTENTNODE_TREE_ID_CACHE_KEY = "contentnode_{pk}__tree_id"
class ContentNode(MPTTModel, models.Model):
"""
By default, all nodes have a title and can be used as a topic.
"""
# Random id used internally on Studio (See `node_id` for id used in Kolibri)
id = UUIDField(primary_key=True, default=uuid.uuid4)
# the content_id is used for tracking a user's interaction with a piece of
# content, in the face of possibly many copies of that content. When a user
# interacts with a piece of content, all substantially similar pieces of
# content should be marked as such as well. We track these "substantially
# similar" types of content by having them have the same content_id.
content_id = UUIDField(primary_key=False, default=uuid.uuid4, editable=False, db_index=True)
# Note this field is indexed, but we are using the Index API to give it an explicit name, see the model Meta
node_id = UUIDField(primary_key=False, default=uuid.uuid4, editable=False)
# TODO: disallow nulls once existing models have been set
original_channel_id = UUIDField(primary_key=False, editable=False, null=True,
db_index=True) # Original channel copied from
source_channel_id = UUIDField(primary_key=False, editable=False, null=True) # Immediate channel copied from
# Original node_id of node copied from (TODO: original_node_id clashes with original_node field - temporary)
original_source_node_id = UUIDField(primary_key=False, editable=False, null=True,
db_index=True)
source_node_id = UUIDField(primary_key=False, editable=False, null=True) # Immediate node_id of node copied from
# Fields specific to content generated by Ricecooker
source_id = models.CharField(max_length=200, blank=True, null=True)
source_domain = models.CharField(max_length=300, blank=True, null=True)
title = models.CharField(max_length=200, blank=True)
description = models.TextField(blank=True)
kind = models.ForeignKey('ContentKind', related_name='contentnodes', db_index=True, null=True, blank=True, on_delete=models.SET_NULL)
license = models.ForeignKey('License', null=True, blank=True, on_delete=models.SET_NULL)
license_description = models.CharField(max_length=400, null=True, blank=True)
prerequisite = models.ManyToManyField('self', related_name='is_prerequisite_of',
through='PrerequisiteContentRelationship', symmetrical=False, blank=True)
is_related = models.ManyToManyField('self', related_name='relate_to', through='RelatedContentRelationship',
symmetrical=False, blank=True)
language = models.ForeignKey('Language', null=True, blank=True, related_name='content_language', on_delete=models.SET_NULL)
parent = TreeForeignKey('self', null=True, blank=True, related_name='children', db_index=True, on_delete=models.CASCADE)
tags = models.ManyToManyField(ContentTag, symmetrical=False, related_name='tagged_content', blank=True)
# No longer used
sort_order = models.FloatField(max_length=50, default=1, verbose_name="sort order",
help_text="Ascending, lowest number shown first")
copyright_holder = models.CharField(max_length=200, null=True, blank=True, default="",
help_text="Organization of person who holds the essential rights")
# legacy field...
original_node = TreeForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='duplicates')
cloned_source = TreeForeignKey('self', on_delete=models.SET_NULL, null=True, blank=True, related_name='clones')
thumbnail_encoding = models.TextField(blank=True, null=True)
created = models.DateTimeField(default=timezone.now, verbose_name="created")
modified = models.DateTimeField(auto_now=True, verbose_name="modified")
published = models.BooleanField(default=False)
publishing = models.BooleanField(default=False)
complete = models.BooleanField(null=True)
changed = models.BooleanField(default=True)
"""
Extra fields for exercises:
- type: mastery model to use to determine completion
- m: m value for M out of N mastery criteria
- n: n value for M out of N mastery criteria
"""
extra_fields = JSONField(default=dict, blank=True, null=True)
author = models.CharField(max_length=200, blank=True, default="", help_text="Who created this content?",
null=True)
aggregator = models.CharField(max_length=200, blank=True, default="", help_text="Who gathered this content together?",
null=True)
provider = models.CharField(max_length=200, blank=True, default="", help_text="Who distributed this content?",
null=True)
role_visibility = models.CharField(max_length=50, choices=roles.choices, default=roles.LEARNER)
freeze_authoring_data = models.BooleanField(default=False)
# Fields for metadata labels
# These fields use a map to store applied labels
# {
# "<label_id1>": true,
# "<label_id2>": true,
# }
grade_levels = models.JSONField(blank=True, null=True)
resource_types = models.JSONField(blank=True, null=True)
learning_activities = models.JSONField(blank=True, null=True)
accessibility_labels = models.JSONField(blank=True, null=True)
categories = models.JSONField(blank=True, null=True)
learner_needs = models.JSONField(blank=True, null=True)
# A field for storing a suggested duration for the content node
# this duration should be in seconds.
suggested_duration = models.IntegerField(blank=True, null=True, help_text="Suggested duration for the content node (in seconds)")
objects = CustomContentNodeTreeManager()
# Track all updates and ignore a blacklist of attributes
# when we check for changes
_field_updates = FieldTracker()
_permission_filter = Q(tree_id=OuterRef("tree_id"))
@classmethod
def _annotate_channel_id(cls, queryset):
# Annotate channel id
return queryset.annotate(
channel_id=Subquery(
Channel.objects.filter(
main_tree__tree_id=OuterRef("tree_id")
).values_list("id", flat=True)[:1]
)
)
@classmethod
def filter_by_pk(cls, pk):
"""
When `settings.IS_CONTENTNODE_TABLE_PARTITIONED` is `False`, this always
returns a queryset filtered by pk.
When `settings.IS_CONTENTNODE_TABLE_PARTITIONED` is `True` and a ContentNode
for `pk` exists, this returns a queryset filtered by `pk` AND `tree_id`. If
a ContentNode does not exist for `pk` then an empty queryset is returned.
"""
query = ContentNode.objects.filter(pk=pk)
if settings.IS_CONTENTNODE_TABLE_PARTITIONED is True:
tree_id = cache.get(CONTENTNODE_TREE_ID_CACHE_KEY.format(pk=pk))
if tree_id:
query = query.filter(tree_id=tree_id)
else:
tree_id = ContentNode.objects.filter(pk=pk).values_list("tree_id", flat=True).first()
if tree_id:
cache.set(CONTENTNODE_TREE_ID_CACHE_KEY.format(pk=pk), tree_id, None)
query = query.filter(tree_id=tree_id)
else:
query = query.none()
return query
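# Usage sketch (`node_pk` is a hypothetical primary key): the cached tree_id
# lets lookups against the partitioned table filter by the partition key too.
#
#     node = ContentNode.filter_by_pk(pk=node_pk).first()  # None if unknown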
@classmethod
def filter_edit_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
if not user_id:
return queryset.none()
edit_cte = PermissionCTE.editable_channels(user_id)
queryset = queryset.with_cte(edit_cte).annotate(
edit=edit_cte.exists(cls._permission_filter),
)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
queryset = queryset.annotate(
public=Exists(
Channel.objects.filter(
public=True, main_tree__tree_id=OuterRef("tree_id")
).values("pk")
),
)
if not user_id:
return queryset.annotate(edit=boolean_val(False), view=boolean_val(False)).filter(public=True)
edit_cte = PermissionCTE.editable_channels(user_id)
view_cte = PermissionCTE.view_only_channels(user_id)
queryset = queryset.with_cte(edit_cte).with_cte(view_cte).annotate(
edit=edit_cte.exists(cls._permission_filter),
view=view_cte.exists(cls._permission_filter),
)
if user.is_admin:
return queryset
return queryset.filter(
Q(view=True)
| Q(edit=True)
| Q(public=True)
)
@raise_if_unsaved
def get_root(self):
# Only topics can be root nodes
if self.is_root_node() and self.kind_id != content_kinds.TOPIC:
return self
return super(ContentNode, self).get_root()
@raise_if_unsaved
def get_root_id(self):
# Only topics can be root nodes
if self.is_root_node() and self.kind_id != content_kinds.TOPIC:
return self
return ContentNode.objects.values_list('pk', flat=True).get(
tree_id=self._mpttfield('tree_id'),
parent=None,
)
def get_tree_data(self, levels=float('inf')):
"""
Returns `levels`-deep tree information starting at current node.
Args:
levels (int): depth of tree hierarchy to return
Returns:
tree (dict): starting with self, with a children list containing either
just the children's `node_id`s or the full recursive tree.
"""
if self.kind_id == content_kinds.TOPIC:
node_data = {
"title": self.title,
"kind": self
| 0 |
32e904a39d03d3166369420b49db0b9b118110a3
|
Python
|
.kind_id,
"node_id": self.node_id,
"studio_id": self.id,
}
children = self.children.all()
if levels > 0:
node_data["children"] = [c.get_tree_data(levels=levels - 1) for c in children]
return node_data
if self.kind_id == content_kinds.EXERCISE:
return {
"title": self.title,
"kind": self.kind_id,
"count": self.assessment_items.count(),
"node_id": self.node_id,
"studio_id": self.id,
}
return {
"title": self.title,
"kind": self.kind_id,
"file_size": self.files.values('file_size').aggregate(size=Sum('file_size'))['size'],
"node_id": self.node_id,
"studio_id": self.id,
}
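# Usage sketch (hypothetical `topic` node): limiting `levels` avoids walking
# the entire subtree.
#
#     summary = topic.get_tree_data(levels=1)
#     # {"title": ..., "kind": "topic", "node_id": ..., "studio_id": ...,
#     #  "children": [...one level of child dicts...]}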
def get_original_node(self):
original_node = self.original_node or self
if self.original_channel_id and self.original_source_node_id:
original_tree_id = Channel.objects.select_related("main_tree").get(pk=self.original_channel_id).main_tree.tree_id
original_node = ContentNode.objects.filter(tree_id=original_tree_id, node_id=self.original_source_node_id).first() or \
ContentNode.objects.filter(tree_id=original_tree_id, content_id=self.content_id).first() or self
return original_node
def get_associated_presets(self):
key = "associated_presets_{}".format(self.kind_id)
cached_data = cache.get(key)
if cached_data:
return cached_data
presets = list(FormatPreset.objects.filter(kind=self.kind).values())
cache.set(key, presets, None)
return presets
def get_prerequisites(self):
prerequisite_mapping = {}
prerequisites = self.prerequisite.all()
prereqlist = list(prerequisites)
for prereq in prerequisites:
prlist, prereqmapping = prereq.get_prerequisites()
prerequisite_mapping.update({prereq.pk: prereqmapping})
prereqlist.extend(prlist)
return prereqlist, prerequisite_mapping
def get_postrequisites(self):
postrequisite_mapping = {}
postrequisites = self.is_prerequisite_of.all()
postreqlist = list(postrequisites)
for postreq in postrequisites:
prlist, postreqmapping = postreq.get_postrequisites()
postrequisite_mapping.update({postreq.pk: postreqmapping})
postreqlist.extend(prlist)
return postreqlist, postrequisite_mapping
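# Sketch of the returned shape for both traversals (assumed pks, for
# illustration): a flat list of nodes plus a nested mapping keyed by pk.
#
#     prereqs, mapping = node.get_prerequisites()
#     # mapping == {<prereq_pk>: {<its_prereq_pk>: {...}}, ...}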
def get_channel_id(self):
if hasattr(self, "channel_id"):
return self.channel_id
channel = self.get_channel()
if channel:
return channel.id
return None
def get_channel(self):
try:
root = self.get_root()
if not root:
return None
return Channel.objects.filter(Q(main_tree=root) | Q(chef_tree=root) | Q(trash_tree=root) | Q(staging_tree=root) | Q(previous_tree=root)).first()
except (ObjectDoesNotExist, MultipleObjectsReturned, AttributeError):
return None
def get_thumbnail(self):
# thumbnail_encoding may not parse with plain json.loads, so use load_json_string to get a dict
if self.thumbnail_encoding:
thumbnail_data = load_json_string(self.thumbnail_encoding)
if type(thumbnail_data) is dict and thumbnail_data.get("base64"):
return thumbnail_data["base64"]
thumbnail = self.files.filter(preset__thumbnail=True).first()
if thumbnail:
return generate_storage_url(str(thumbnail))
return ""
@classmethod
def get_nodes_with_title(cls, title, limit_to_children_of=None):
"""
Returns all ContentNodes with a given title. If limit_to_children_of
is passed in with an id, only look at all the children of the node with that id.
"""
if limit_to_children_of:
root = cls.objects.get(id=limit_to_children_of)
return root.get_descendants().filter(title=title)
return cls.objects.filter(title=title)
def get_details(self, channel_id=None):
"""
Returns information about the node and its children, including total size, languages, files, etc.
:return: A dictionary with detailed statistics and information about the node.
"""
from contentcuration.viewsets.common import SQArrayAgg
from contentcuration.viewsets.common import SQCount
from contentcuration.viewsets.common import SQRelatedArrayAgg
from contentcuration.viewsets.common import SQSum
from contentcuration.viewsets.common import SQJSONBKeyArrayAgg
node = ContentNode.objects.filter(pk=self.id, tree_id=self.tree_id).order_by()
descendants = (
self.get_descendants()
.values("id")
)
if channel_id:
channel = Channel.objects.filter(id=channel_id)[0]
else:
channel = self.get_channel()
if not descendants.exists():
data = {
"last_update": pytz.utc.localize(datetime.now()).strftime(
settings.DATE_TIME_FORMAT
),
"created": self.created.strftime(settings.DATE_TIME_FORMAT),
"resource_count": 0,
"resource_size": 0,
"includes": {"coach_content": 0, "exercises": 0},
"kind_count": [],
"languages": [],
"accessible_languages": [],
"licenses": [],
"tags": [],
"copyright_holders": [],
"authors": [],
"aggregators": [],
"providers": [],
"sample_pathway": [],
"original_channels": [],
"sample_nodes": [],
"levels": [],
"categories": [],
}
# Set cache with latest data
cache.set("details_{}".format(self.node_id), json.dumps(data), None)
return data
# Get resources
resources = descendants.exclude(kind=content_kinds.TOPIC).order_by()
nodes = With(
File.objects.filter(contentnode_id__in=Subquery(resources.values("id")))
.values("checksum", "file_size")
.order_by(),
name="nodes",
)
file_query = (
nodes.queryset().with_cte(nodes).values("checksum", "file_size").distinct()
)
l_nodes = With(
File.objects.filter(contentnode_id__in=Subquery(resources.values("id")))
.values("language_id", "preset_id")
.order_by(),
name="l_nodes",
)
accessible_languages_query = (
l_nodes.queryset()
.filter(preset_id=format_presets.VIDEO_SUBTITLE)
.with_cte(l_nodes)
.values("language__native_name")
.distinct()
)
tags_query = str(
ContentTag.objects.filter(
tagged_content__pk__in=descendants.values_list("pk", flat=True)
)
.values("tag_name")
.annotate(count=Count("tag_name"))
.query
).replace("topic", "'topic'")
kind_count_query = str(
resources.values("kind_id").annotate(count=Count("kind_id")).query
).replace("topic", "'topic'")
node = node.annotate(
resource_count=SQCount(resources, field="id"),
resource_size=SQSum(file_query, field="file_size"),
copyright_holders=SQArrayAgg(
resources.distinct("copyright_holder").order_by("copyright_holder"),
field="copyright_holder",
),
authors=SQArrayAgg(
resources.distinct("author").order_by("author"), field="author"
),
aggregators=SQArrayAgg(
resources.distinct("aggregator").order_by("aggregator"),
field="aggregator",
),
providers=SQArrayAgg(
resources.distinct("provider").order_by("provider"), field="provider"
),
languages=SQRelatedArrayAgg(
descendants.exclude(language=None)
.distinct("language__native_name")
.order_by(),
field="language__native_name",
fieldname="native_name",
),
accessible_languages=SQRelatedArrayAgg(
accessible_languages_query,
field="language__native_name",
fieldname="native_name",
),
licenses=SQRelatedArrayAgg(
resources.exclude(license=None)
.distinct("license__license_name")
.order_by("license__license_name"),
field="license__license_name",
fieldname="license_name",
),
kind_count=RawSQL(
"SELECT json_agg(row_to_json (x)) FROM ({}) as x".format(
kind_count_query
),
(),
),
tags_list=RawSQL(
"SELECT json_agg(row_to_json (x)) FROM ({}) as x".format(tags_query), ()
),
coach_content=SQCount(
resources.filter(role_visibility=roles.COACH), field="id"
),
exercises=SQCount(
resources.filter(kind_id=content_kinds.EXERCISE), field="id"
),
levels=SQJSONBKeyArrayAgg(
descendants.exclude(grade_levels__isnull=True),
field="grade_levels",
),
all_categories=SQJSONBKeyArrayAgg(
descendants.exclude(categories__isnull=True),
field="categories",
),
)
# Get sample pathway by getting longest path
# Using resources.aggregate adds a lot of time, use values that have already been fetched
max_level = max(
resources.values_list("level", flat=True).order_by().distinct() or [0]
)
m_nodes = With(
resources.values("id", "level", "tree_id", "lft").order_by(),
name="m_nodes",
)
deepest_node_record = (
m_nodes.queryset()
.with_cte(m_nodes)
.filter(level=max_level)
.values("id")
.order_by("tree_id", "lft")
.first()
)
if deepest_node_record:
deepest_node = ContentNode.objects.get(pk=deepest_node_record["id"])
pathway = (
list(
deepest_node.get_ancestors()
.order_by()
.exclude(parent=None)
.values("title", "node_id", "kind_id")
.order_by()
)
if deepest_node_record
else []
)
sample_nodes = (
[
{
"node_id": n.node_id,
"title": n.title,
"description": n.description,
"thumbnail": n.get_thumbnail(),
"kind": n.kind_id,
}
for n in deepest_node.get_siblings(include_self=True)[0:4]
]
if deepest_node_record
else []
)
# Get list of channels nodes were originally imported from (omitting the current channel)
channel_id = channel and channel.id
originals = (
resources.values("original_channel_id")
.annotate(count=Count("original_channel_id"))
.order_by("original_channel_id")
)
originals = {c["original_channel_id"]: c["count"] for c in originals}
original_channels = (
Channel.objects.exclude(pk=channel_id)
.filter(pk__in=originals.keys(), deleted=False)
.order_by()
)
original_channels = [
{
"id": c.id,
"name": "{}{}".format(
c.name, _(" (Original)") if channel_id == c.id else ""
),
"thumbnail": c.get_thumbnail(),
"count": originals[c.id],
}
for c in original_channels
]
node = (
node.order_by()
.values(
"id",
"resource_count",
"resource_size",
"copyright_holders",
"authors",
"aggregators",
"providers",
"languages",
"accessible_languages",
"coach_content",
"licenses",
"tags_list",
"kind_count",
"exercises",
"levels",
"all_categories",
)
.first()
)
for_educators = {
"coach_content": node["coach_content"],
"exercises": node["exercises"],
}
# Serialize data
data = {
"last_update": pytz.utc.localize(datetime.now()).strftime(
settings.DATE_TIME_FORMAT
),
"created": self.created.strftime(settings.DATE_TIME_FORMAT),
"resource_count": node.get("resource_count", 0),
"resource_size": node.get("resource_size", 0),
"includes": for_educators,
"kind_count": node.get("kind_count") or [],
"languages": node.get("languages") or [],
"accessible_languages": node.get("accessible_languages") or [],
"licenses": node.get("licenses") or [],
"tags": node.get("tags_list") or [],
"original_channels": original_channels,
"sample_pathway": pathway,
"sample_nodes": sample_nodes,
# source model fields for the below default to an empty string, but can also be null
"authors": list(filter(bool, node["authors"])),
"aggregators": list(filter(bool, node["aggregators"])),
"providers": list(filter(bool, node["providers"])),
"copyright_holders": list(filter(bool, node["copyright_holders"])),
"levels": node.get("levels") or [],
"categories": node.get("all_categories") or [],
}
# Set cache with latest data
cache.set("details_{}".format(self.node_id), json.dumps(data), None)
return data
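# Usage sketch (hypothetical `node`): results are cached under
# "details_<node_id>" with no timeout, so repeat calls are cheap until the
# entry is overwritten by a newer computation.
#
#     details = node.get_details()
#     details["resource_count"], details["resource_size"]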
def has_changes(self):
mptt_opts = self._mptt_meta
# Ignore fields that are used for dirty tracking, and also mptt fields, as changes to these are tracked in mptt manager methods.
blacklist = set([
'changed',
'modified',
'publishing',
mptt_opts.tree_id_attr,
mptt_opts.left_attr,
mptt_opts.right_attr,
mptt_opts.level_attr,
])
original_values = self._field_updates.changed()
return any((True for field in original_values if field not in blacklist))
def recalculate_editors_storage(self):
from contentcuration.utils.user import calculate_user_storage
for editor in self.files.values_list('uploaded_by_id', flat=True).distinct():
calculate_user_storage(editor)
def mark_complete(self): # noqa C901
errors = []
# A non-empty title is required for completeness, except for root nodes.
if not (bool(self.title) or self.parent_id is None):
errors.append("Empty title")
if self.kind_id != content_kinds.TOPIC:
if not self.license:
errors.append("Missing license")
if self.license and self.license.is_custom and not self.license_description:
errors.append("Missing license description for custom license")
if self.license and self.license.copyright_holder_required and not self.copyright_holder:
errors.append("Missing required copyright holder")
if self.kind_id != content_kinds.EXERCISE and not self.files.filter(preset__supplementary=False).exists():
errors.append("Missing default file")
if self.kind_id == content_kinds.EXERCISE:
# Check to see if the exercise has at least one assessment item that has:
if not self.assessment_items.filter(
# Item with non-blank raw data
~Q(raw_data="") | (
# A non-blank question
~Q(question='')
# Non-blank answers
& ~Q(answers='[]')
# With either an input question or one answer marked as correct
& (Q(type=exercises.INPUT_QUESTION) | Q(answers__iregex=r'"correct":\s*true'))
)
).exists():
errors.append("No questions with question text and complete answers")
# Check that it has a mastery model set
# Either check for the previous location for the mastery model, or rely on our completion criteria validation
# that if it has been set, then it has been set correctly.
criterion = self.extra_fields.get("options", {}).get("completion_criteria")
if not (self.extra_fields.get("mastery_model") or criterion):
errors.append("Missing mastery criterion")
if criterion:
try:
completion_criteria.validate(criterion, kind=content_kinds.EXERCISE)
except completion_criteria.ValidationError:
errors.append("Mastery criterion is defined but is invalid")
self.complete = not errors
return errors
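# Usage sketch (hypothetical `node`): mark_complete() only mutates the
# in-memory instance, so callers must still save to persist the flag.
#
#     errors = node.mark_complete()
#     node.save()
#     # errors is a list of human-readable validation messages, empty if complete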
def make_content_id_unique(self):
"""
If self is NOT an original contentnode (in other words, a copied contentnode)
and a contentnode with same content_id exists then we update self's content_id.
"""
is_node_original = self.original_source_node_id is None or self.original_source_node_id == self.node_id
node_same_content_id = ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id)
if (not is_node_original) and node_same_content_id.exists():
ContentNode.objects.filter(pk=self.pk).update(content_id=uuid.uuid4().hex)
def on_create(self):
self.changed = True
self.recalculate_editors_storage()
self.set_default_learning_activity()
def on_update(self):
self.changed = self.changed or self.has_changes()
def move_to(self, target, *args, **kwargs):
parent_was_trashtree = self.parent.channel_trash.exists()
super(ContentNode, self).move_to(target, *args, **kwargs)
self.save()
# Update tree_id cache when node is moved to another tree
cache.set(CONTENTNODE_TREE_ID_CACHE_KEY.format(pk=self.id), self.tree_id, None)
# Recalculate storage if node was moved to or from the trash tree
if target.channel_trash.exists() or parent_was_trashtree:
self.recalculate_editors_storage()
def set_default_learning_activity(self):
if self.learning_activities is None:
if self.kind in kind_activity_map:
self.learning_activities = {
kind_activity_map[self.kind]: True
}
def save(self, skip_lock=False, *args, **kwargs):
if self._state.adding:
self.on_create()
else:
self.on_update()
# Logic borrowed from mptt - do a simple check to see if we have changed
# the parent of the node. We use the mptt specific cached fields here
# because these get updated by the mptt move methods, and so will be up to
# date, meaning we can avoid locking the DB twice when the fields have already
# been updated in the database.
# If most moves are being done independently of just changing the parent
# and then calling a save, locking within the save method itself should rarely
# be triggered - meaning updates to contentnode metadata should only rarely
# trigger a write lock on mptt fields.
old_parent_id = self._field_updates.changed().get("parent_id")
if self._state.adding and (self.parent_id or self.parent):
same_order = False
elif old_parent_id is DeferredAttribute:
same_order = True
else:
same_order = old_parent_id == self.parent_id
if not same_order:
changed_ids = list(filter(lambda x: x is not None, set([old_parent_id, self.parent_id])))
else:
changed_ids = []
if not same_order and not skip_lock:
# Lock the mptt fields for the trees of the old and new parent
with ContentNode.objects.lock_mptt(*ContentNode.objects
.filter(id__in=[pid for pid in [old_parent_id, self.parent_id] if pid])
.values_list('tree_id', flat=True).distinct()):
super(ContentNode, self).save(*args, **kwargs)
# Always write to the database for the parent change updates, as we have
# no persistent object references for the original and new parent to modify
if changed_ids:
ContentNode.objects.filter(id__in=changed_ids).update(changed=True)
else:
super(ContentNode, self).save(*args, **kwargs)
# Always write to the database for the parent change updates, as we have
# no persistent object references for the original and new parent to modify
if changed_ids:
ContentNode.objects.filter(id__in=changed_ids).update(changed=True)
# Copied from MPTT
save.alters_data = True
def delete(self, *args, **kwargs):
parent = self.parent or self._field_updates.changed().get('parent')
if parent:
parent.changed = True
parent.save()
self.recalculate_editors_storage()
# Lock the mptt fields for the tree of this node
with ContentNode.objects.lock_mptt(self.tree_id):
return super(ContentNode, self).delete(*args, **kwargs)
# Copied from MPTT
delete.alters_data = True
def copy_to(
self,
target=None,
position="last-child",
pk=None,
mods=None,
excluded_descendants=None,
can_edit_source_channel=None,
batch_size=None,
progress_tracker=None
):
return self._tree_manager.copy_node(self, target, position, pk, mods, excluded_descendants, can_edit_source_channel, batch_size, progress_tracker)[0]
def copy(self):
return self.copy_to()
def is_publishable(self):
return self.complete and self.get_descendants(include_self=True).exclude(kind_id=content_kinds.TOPIC).exists()
class Meta:
verbose_name = "Topic"
verbose_name_plural = "Topics"
# Do not allow two nodes with the same name on the same level
# unique_together = ('parent', 'title')
indexes = [
models.Index(fields=["node_id"], name=NODE_ID_INDEX_NAME),
models.Index(fields=["-modified"], name=NODE_MODIFIED_DESC_INDEX_NAME),
]
class ContentKind(models.Model):
kind = models.CharField(primary_key=True, max_length=200, choices=content_kinds.choices)
def __str__(self):
return self.kind
class FileFormat(models.Model):
extension = models.CharField(primary_key=True, max_length=40, choices=file_formats.choices)
mimetype = models.CharField(max_length=200, blank=True)
def __str__(self):
return self.extension
class FormatPreset(models.Model):
id = models.CharField(primary_key=True, max_length=150, choices=format_presets.choices)
readable_name = models.CharField(max_length=400)
multi_language = models.BooleanField(default=False)
supplementary = models.BooleanField(default=False)
thumbnail = models.BooleanField(default=False)
subtitle = models.BooleanField(default=False)
display = models.BooleanField(default=True) # Render on client side
order = models.IntegerField(default=0)
kind = models.ForeignKey(ContentKind, related_name='format_presets', null=True, on_delete=models.SET_NULL)
allowed_formats = models.ManyToManyField(FileFormat, blank=True)
def __str__(self):
return self.id
@classmethod
def guess_format_preset(cls, filename):
"""
Guess the format preset of a filename based on its extension.
Return None if format is unknown.
"""
_, ext = os.path.splitext(filename)
ext = ext.lstrip(".")
f = FormatPreset.objects.filter(
allowed_formats__extension=ext,
display=True
)
return f.first()
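# Usage sketch (assumes the extension is registered in file_formats.choices):
#
#     preset = FormatPreset.guess_format_preset("lesson.mp4")  # FormatPreset or None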
@classmethod
def get_preset(cls, preset_name):
"""
Get the FormatPreset object with that exact name.
Returns None if that format preset is not found.
"""
try:
return FormatPreset.objects.get(id=preset_name)
except FormatPreset.DoesNotExist:
return None
class Language(models.Model):
id = models.CharField(max_length=14, primary_key=True)
lang_code = models.CharField(max_length=3, db_index=True)
lang_subcode = models.CharField(max_length=10, db_index=True, blank=True, null=True)
readable_name = models.CharField(max_length=100, blank=True)
native_name = models.CharField(max_length=100, blank=True)
lang_direction = models.CharField(max_length=3, choices=languages.LANGUAGE_DIRECTIONS, default=languages.LANGUAGE_DIRECTIONS[0][0])
def ietf_name(self):
return "{code}-{subcode}".format(code=self.lang_code,
subcode=self.lang_subcode) if self.lang_subcode else self.lang_code
def __str__(self):
return self.ietf_name()
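# Sketch of ietf_name() (assumed codes, for illustration):
#
#     Language(lang_code="en", lang_subcode="us").ietf_name()  # -> "en-us"
#     Language(lang_code="sw").ietf_name()                     # -> "sw"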
ASSESSMENT_ID_INDEX_NAME = "assessment_id_idx"
class AssessmentItem(models.Model):
type = models.CharField(max_length=50, default="multiplechoice")
question = models.TextField(blank=True)
hints = models.TextField(default="[]")
answers = models.TextField(default="[]")
order = models.IntegerField(default=1)
contentnode = models.ForeignKey('ContentNode', related_name="assessment_items", blank=True, null=True,
db_index=True, on_delete=models.CASCADE)
# Note this field is indexed, but we are using the Index API to give it an explicit name, see the model Meta
assessment_id = UUIDField(primary_key=False, default=uuid.uuid4, editable=False)
raw_data = models.TextField(blank=True)
source_url = models.CharField(max_length=400, blank=True, null=True)
randomize = models.BooleanField(default=False)
deleted = models.BooleanField(default=False)
objects = CustomManager()
# Track all updates
_field_updates = FieldTracker()
def has_changes(self):
return bool(self._field_updates.changed())
class Meta:
indexes = [
models.Index(fields=["assessment_id"], name=ASSESSMENT_ID_INDEX_NAME),
]
unique_together = ['contentnode', 'assessment_id']
_permission_filter = Q(tree_id=OuterRef("contentnode__tree_id"))
@classmethod
def filter_edit_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
if not user_id:
return queryset.none()
edit_cte = PermissionCTE.editable_channels(user_id)
queryset = queryset.with_cte(edit_cte).annotate(
edit=edit_cte.exists(cls._permission_filter),
)
if user.is_admin:
return queryset
return queryset.filter(edit=True)
@classmethod
def filter_view_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
queryset = queryset.annotate(
public=Exists(
Channel.objects.filter(
public=True, main_tree__tree_id=OuterRef("contentnode__tree_id")
).values("pk")
),
)
if not user_id:
return queryset.annotate(edit=boolean_val(False), view=boolean_val(False)).filter(public=True)
edit_cte = PermissionCTE.editable_channels(user_id)
view_cte = PermissionCTE.view_only_channels(user_id)
queryset = queryset.with_cte(edit_cte).with_cte(view_cte).annotate(
edit=edit_cte.exists(cls._permission_filter),
view=view_cte.exists(cls._permission_filter),
)
if user.is_admin:
return queryset
return queryset.filter(Q(view=True) | Q(edit=True) | Q(public=True))
def on_create(self):
"""
When an exercise is added to a contentnode, update the contentnode's
content_id if it's a copied contentnode.
"""
self.contentnode.make_content_id_unique()
def on_update(self):
"""
When an exercise of a contentnode is updated, update the contentnode's
content_id if it's a copied contentnode.
"""
self.contentnode.make_content_id_unique()
def delete(self, *args, **kwargs):
"""
When an exercise is deleted from a contentnode, update the contentnode's
content_id if it's a copied contentnode.
"""
self.contentnode.make_content_id_unique()
return super(AssessmentItem, self).delete(*args, **kwargs)
class SlideshowSlide(models.Model):
contentnode = models.ForeignKey('ContentNode', related_name="slideshow_slides", blank=True, null=True,
db_index=True, on_delete=models.CASCADE)
sort_order = models.FloatField(default=1.0)
metadata = JSONField(default=dict)
class StagedFile(models.Model):
"""
Keeps track of files uploaded through Ricecooker to avoid user going over disk quota limit
"""
checksum = models.CharField(max_length=400, blank=True, db_index=True)
file_size = models.IntegerField(blank=True, null=True)
uploaded_by = models.ForeignKey(User, related_name='staged_files', blank=True, null=True, on_delete=models.CASCADE)
FILE_DISTINCT_INDEX_NAME = "file_checksum_file_size_idx"
FILE_MODIFIED_DESC_INDEX_NAME = "file_modified_desc_idx"
FILE_DURATION_CONSTRAINT = "file_media_duration_int"
MEDIA_PRESETS = [
format_presets.AUDIO,
format_presets.AUDIO_DEPENDENCY,
format_presets.VIDEO_HIGH_RES,
format_presets.VIDEO_LOW_RES,
format_presets.VIDEO_DEPENDENCY,
]
class File(models.Model):
"""
The bottom layer of the contentDB schema, defines the basic building block for content.
Things it can represent are, for example, mp4, avi, mov, html, css, jpeg, pdf, mp3...
"""
id = UUIDField(primary_key=True, default=uuid.uuid4)
checksum = models.CharField(max_length=400, blank=True, db_index=True)
file_size = models.IntegerField(blank=True, null=True)
file_on_disk = models.FileField(upload_to=object_storage_name, storage=default_storage, max_length=500,
blank=True)
contentnode = models.ForeignKey(ContentNode, related_name='files', blank=True, null=True, db_index=True, on_delete=models.CASCADE)
assessment_item = models.ForeignKey(AssessmentItem, related_name='files', blank=True, null=True, db_index=True, on_delete=models.CASCADE)
slideshow_slide = models.ForeignKey(SlideshowSlide, related_name='files', blank=True, null=True, db_index=True, on_delete=models.CASCADE)
file_format = models.ForeignKey(FileFormat, related_name='files', blank=True, null=True, db_index=True, on_delete=models.SET_NULL)
preset = models.ForeignKey(FormatPreset, related_name='files', blank=True, null=True, db_index=True, on_delete=models.SET_NULL)
language = models.ForeignKey(Language, related_name='files', blank=True, null=True, on_delete=models.SET_NULL)
original_filename = models.CharField(max_length=255, blank=True)
source_url = models.CharField(max_length=400, blank=True, null=True)
uploaded_by = models.ForeignKey(User, related_name='files', blank=True, null=True, on_delete=models.SET_NULL)
modified = models.DateTimeField(auto_now=True, verbose_name="modified", null=True)
duration = models.IntegerField(blank=True, null=True)
objects = CustomManager()
_permission_filter = Q(tree_id=OuterRef("contentnode__tree_id")) | Q(tree_id=OuterRef("assessment_item__contentnode__tree_id"))
@classmethod
def filter_edit_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
if not user_id:
return queryset.none()
cte = PermissionCTE.editable_channels(user_id)
queryset = queryset.with_cte(cte).annotate(edit=cte.exists(cls._permission_filter))
if user.is_admin:
return queryset
return queryset.filter(
Q(edit=True) | Q(uploaded_by=user, contentnode__isnull=True, assessment_item__isnull=True)
)
@classmethod
def filter_view_queryset(cls, queryset, user):
user_id = not user.is_anonymous and user.id
queryset = queryset.annotate(
public=Exists(
Channel.objects.filter(public=True).filter(
Q(main_tree__tree_id=OuterRef("contentnode__tree_id"))
| Q(main_tree__tree_id=OuterRef("assessment_item__contentnode__tree_id"))
).values("pk")
),
)
if not user_id:
return queryset.annotate(edit=boolean_val(False), view=boolean_val(False)).filter(public=True)
edit_cte = PermissionCTE.editable_channels(user_id)
view_cte = PermissionCTE.view_only_channels(user_id)
queryset = queryset.with_cte(edit_cte).with_cte(view_cte).annotate(
edit=edit_cte.exists(cls._permission_filter),
view=view_cte.exists(cls._permission_filter),
)
if user.is_admin:
return queryset
return queryset.filter(
Q(view=True)
| Q(edit=True)
| Q(public=True)
| Q(uploaded_by=user, contentnode__isnull=True, assessment_item__isnull=True)
)
class Admin:
pass
def __str__(self):
return '{checksum}{extension}'.format(checksum=self.checksum, extension='.' + self.file_format.extension)
def filename(self):
"""
Returns just the filename of the File in storage, without the path
e.g. abcd.mp4
"""
# TODO(aron): write tests for this
return os.path.basename(self.file_on_disk.name)
def update_contentnode_content_id(self):
"""
If the file is attached to a contentnode and is not a thumbnail
then update that contentnode's content_id if it's a copied contentnode.
"""
if self.contentnode and self.preset.thumbnail is False:
self.contentnode.make_content_id_unique()
def on_update(self):
# since modified was added later as a nullable field to File, we don't use a default but
# instead we'll just make sure it's always updated through our serializers
self.modified = timezone.now()
self.update_contentnode_content_id()
def save(self, set_by_file_on_disk=True, *args, **kwargs):
"""
Override the default save method.
If the file_on_disk FileField gets passed a content copy:
1. generate the MD5 from the content copy
2. fill the other fields accordingly
"""
from contentcuration.utils.user import calculate_user_storage
# check if the file format exists in file_formats.choices
if self.file_format_id:
if self.file_format_id not in dict(file_formats.choices):
raise ValidationError("Invalid file_format")
if set_by_file_on_disk and self.file_on_disk: # if file_on_disk is supplied, hash out the file
if self.checksum is None or self.checksum == "":
md5 = hashlib.md5()
for chunk in self.file_on_disk.chunks():
md5.update(chunk)
self.checksum = md5.hexdigest()
if not self.file_size:
self.file_size = self.file_on_disk.size
if not self.file_format_id:
ext = os.path.splitext(self.file_on_disk.name)[1].lstrip('.')
if ext in list(dict(file_formats.choices).keys()):
self.file_format_id = ext
else:
raise ValueError("Files of type `{}` are not supported.".format(ext))
super(File, self).save(*args, **kwargs)
if self.uploaded_by_id:
calculate_user_storage(self.uploaded_by_id)
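# Usage sketch (hypothetical in-memory upload; assumes "mp4" is a registered
# file format): save() derives checksum, file_size and file_format_id from
# file_on_disk when they are not supplied.
#
#     from django.core.files.base import ContentFile
#     f = File(file_on_disk=ContentFile(b"...", name="clip.mp4"))
#     f.save()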
class Meta:
indexes = [
models.Index(fields=['checksum', 'file_size'], name=FILE_DISTINCT_INDEX_NAME),
models.Index(fields=["-modified"], name=FILE_MODIFIED_DESC_INDEX_NAME),
]
constraints = [
# enforces that duration is null for non-media presets; for media presets
# duration may be null, but when set it must be greater than 0
models.CheckConstraint(
check=(Q(preset__in=MEDIA_PRESETS, duration__gt=0) | Q(duration__isnull=True)),
name=FILE_DURATION_CONSTRAINT
)
]
@receiver(models.signals.post_delete, sender=File)
def auto_delete_file_on_delete(sender, instance, **kwargs):
"""
Deletes file from filesystem if no other File objects are referencing the same file on disk
when corresponding `File` object is deleted.
Be careful! We don't know if this will work when performing bulk deletes on File objects.
"""
# Recalculate storage
from contentcuration.utils.user import calculate_user_storage
if instance.uploaded_by_id:
calculate_user_storage(instance.uploaded_by_id)
def delete_empty_file_reference(checksum, extension):
filename = checksum + '.' + extension
if not File.objects.filter(checksum=checksum).exists() and not Channel.objects.filter(thumbnail=filename).exists():
storage_path = generate_object_storage_name(checksum, filename)
if default_storage.exists(storage_path):
default_storage.delete(storage_path)
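# Usage sketch (hypothetical checksum/extension): the stored object is removed
# only when no File row or Channel thumbnail still references it.
#
#     delete_empty_file_reference("d41d8cd98f00b204e9800998ecf8427e", "png")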
class PrerequisiteContentRelationship(models.Model):
"""
Predefine the prerequisite relationship between two ContentNode objects.
"""
target_node = models.ForeignKey(ContentNode, related_name='%(app_label)s_%(class)s_target_node', on_delete=models.CASCADE)
prerequisite = models.ForeignKey(ContentNode, related_name='%(app_label)s_%(class)s_prerequisite', on_delete=models.CASCADE)
class Meta:
unique_together = ['target_node', 'prerequisite']
def clean(self, *args, **kwargs):
# self reference exception
if self.target_node == self.prerequisite:
raise IntegrityError('Cannot self reference as prerequisite.')
# immediate cyclic exception
if PrerequisiteContentRelationship.objects.using(self._state.db) \
.filter(target_node=self.prerequisite, prerequisite=self.target_node):
raise IntegrityError(
'Note: Prerequisite relationship is directional! %s and %s cannot be prerequisite of each other!'
% (self.target_node, self.prerequisite))
# distant cyclic exception
# elif <this is a nice to have exception, may implement in the future when the priority raises.>
# raise Exception('Note: Prerequisite relationship is acyclic! %s and %s forms a closed loop!' % (
# self.target_node, self.prerequisite
# ))
super(PrerequisiteContentRelationship, self).clean(*args, **kwargs)
def save(self, *args, **kwargs):
self.full_clean()
super(PrerequisiteContentRelationship, self).save(*args, **kwargs)
def __unicode__(self):
return u'%s' % (self.pk)
class RelatedContentRelationship(models.Model):
"""
Predefine the related relationship between two ContentNode objects.
"""
contentnode_1 = models.ForeignKey(ContentNode, related_name='%(app_label)s_%(class)s_1', on_delete=models.CASCADE)
contentnode_2 = models.ForeignKey(ContentNode, related_name='%(app_label)s_%(class)s_2', on_delete=models.CASCADE)
class Meta:
unique_together = ['contentnode_1', 'contentnode_2']
def save(self, *args, **kwargs):
# self reference exception
if self.contentnode_1 == self.contentnode_2:
raise IntegrityError('Cannot self reference as related.')
# handle immediate cyclic
if RelatedContentRelationship.objects.using(self._state.db) \
.filter(contentnode_1=self.contentnode_2, contentnode_2=self.contentnode_1):
return # silently cancel the save
super(RelatedContentRelationship, self).save(*args, **kwargs)
class Invitation(models.Model):
""" Invitation to edit channel """
id = UUIDField(primary_key=True, default=uuid.uuid4)
accepted = models.BooleanField(default=False)
declined = models.BooleanField(default=False)
revoked = models.BooleanField(default=False)
invited = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.SET_NULL, null=True, related_name='sent_to')
share_mode = models.CharField(max_length=50, default=EDIT_ACCESS)
email = models.EmailField(max_length=100, null=True)
sender = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='sent_by', null=True, on_delete=models.CASCADE)
channel = models.ForeignKey('Channel', null=True, related_name='pending_editors', on_delete=models.CASCADE)
first_name = models.CharField(max_length=100, blank=True)
last_name = models.CharField(max_length=100, blank=True, null=True)
class Meta:
verbose_name = "Invitation"
verbose_name_plural = "Invitations"
def accept(self):
user = User.objects.filter(email__iexact=self.email).first()
if self.channel:
# channel is a nullable field, so check that it exists.
if self.share_mode == VIEW_ACCESS:
self.channel.editors.remove(user)
self.channel.viewers.add(user)
else:
self.channel.viewers.remove(user)
self.channel.editors.add(user)
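# Usage sketch (hypothetical `invitation`): accepting moves the matched user
# into the permission bucket implied by share_mode.
#
#     invitation.accept()  # adds the user to editors or viewers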
@classmethod
def filter_edit_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
return queryset.filter(
Q(email__iexact=user.email)
| Q(sender=user)
| Q(channel__editors=user)
).distinct()
@classmethod
def filter_view_queryset(cls, queryset, user):
if user.is_anonymous:
return queryset.none()
if user.is_admin:
return queryset
return queryset.filter(
Q(email__iexact=user.email)
| Q(sender=user)
| Q(channel__editors=user)
| Q(channel__viewers=user)
).distinct()
class Change(models.Model):
server_rev = models.BigAutoField(primary_key=True)
# We need to store the user who is applying this change
# so that we can validate they have permissions to do so
# allow to be null so that we don't lose changes if a user
# account is hard deleted.
created_by = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.SET_NULL, related_name="changes_by_user")
# Almost all changes are related to channels, but some are specific only to users
# so we allow this to be nullable for these edge cases.
# Indexed by default because it's a ForeignKey field.
channel = models.ForeignKey(Channel, null=True, blank=True, on_delete=models.CASCADE)
# For those changes related to users, store a user value instead of channel
# this may be different to created_by, as changes to invitations affect individual users.
# Indexed by default because it's a ForeignKey field.
user = models.ForeignKey(settings.AUTH_USER_MODEL, null=True, blank=True, on_delete=models.CASCADE, related_name="changes_about_user")
# Use client_rev to keep track of changes coming from the client side
# but let it be blank or null for changes we generate on the server side
client_rev = models.IntegerField(null=True, blank=True)
# client_rev numbers are by session, we add the session key here for bookkeeping
# to allow a check within the same session to return whether a change has been applied
# or not, and hence remove it from the frontend
session = models.ForeignKey(Session, null=True, blank=True, on_delete=models.SET_NULL)
table = models.CharField(max_length=32)
change_type = models.IntegerField()
# Use the DRF JSONEncoder class as the encoder here
# so that we can handle anything that has been deserialized by DRF
# or that will be later be serialized by DRF
kwargs = JSONField(encoder=JSONEncoder)
applied = models.BooleanField(default=False)
errored = models.BooleanField(default=False)
@classmethod
def _create_from_change(cls, created_by_id=None, channel_id=None, user_id=None, session_key=None, applied=False, table=None, rev=None, **data):
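# "type" is required in the change payload; whatever remains in data after popping it is stored verbatim in kwargs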
change_type = data.pop("type")
if table is None or table not in ALL_TABLES:
raise TypeError("table is a required argument for creating changes and must be a valid table name")
if change_type is None or change_type not in ALL_CHANGES:
raise TypeError("change_type is a required argument for creating changes and must be a valid change type integer")
return cls(
session_id=session_key,
created_by_id=created_by_id,
channel_id=channel_id,
user_id=user_id,
client_rev=rev,
table=table,
change_type=change_type,
kwargs=data,
applied=applied
)
@classmethod
def create_changes(cls, changes, created_by_id=None, session_key=None, applied=False):
change_models = []
for change in changes:
change_models.append(cls._create_from_change(created_by_id=created_by_id, session_key=session_key, applied=applied, **change))
cls.objects.bulk_create(change_models)
return change_models
@classmethod
def create_change(cls, change, created_by_id=None, session_key=None, applied=False):
obj = cls._create_from_change(created_by_id=created_by_id, session_key=session_key, applied=applied, **change)
obj.save()
return obj
@classmethod
def serialize(cls, change):
datum = get_attribute(change, ["kwargs"]).copy()
datum.update({
"server_rev": get_attribute(change, ["server_rev"]),
"table": get_attribute(change, ["table"]),
"type": get_attribute(change, ["change_type"]),
"channel_id": get_attribute(change, ["channel_id"]),
"user_id": get_attribute(change, ["user_id"]),
"created_by_id": get_attribute(change, ["created_by_id"])
})
return datum
def serialize_to_change_dict(self):
return self.serialize(self)
class TaskResultCustom(object):
"""
Custom fields to add to django_celery_results's TaskResult model
If adding fields to this class, run `makemigrations` then move the generated migration from the
`django_celery_results` app to the `contentcuration` app and override the constructor to change
the app_label. See `0141_add_task_signature` for an example
"""
# user shouldn't be null, but in order to append the field, this needs to be allowed
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="tasks", on_delete=models.CASCADE, null=True)
channel_id = DjangoUUIDField(db_index=True, null=True, blank=True)
progress = models.IntegerField(null=True, blank=True, validators=[MinValueValidator(0), MaxValueValidator(100)])
# a hash of the task name and kwargs for identifying repeat tasks
signature = models.CharField(null=True, blank=False, max_length=32)
super_as_dict = TaskResult.as_dict
def as_dict(self):
"""
:return: A dictionary representation
"""
super_dict = self.super_as_dict()
super_dict.update(
user_id=self.user_id,
channel_id=self.channel_id,
progress=self.progress,
)
return super_dict
@classmethod
def contribute_to_class(cls, model_class=TaskResult):
"""
Adds fields to model, by default TaskResult
:param model_class: TaskResult model
"""
for field in dir(cls):
if not field.startswith("_") and field not in ('contribute_to_class', 'Meta'):
model_class.add_to_class(field, getattr(cls, field))
# manually add Meta afterwards
setattr(model_class._meta, 'indexes', getattr(model_class._meta, 'indexes', []) + cls.Meta.indexes)
class Meta:
indexes = [
# add index that matches query usage for signature
models.Index(
fields=['signature'],
name='task_result_signature_idx',
condition=Q(status__in=celery_states.UNREADY_STATES),
),
]
# trigger class contributions immediately
TaskResultCustom.contribute_to_class()
import ROOT
from PhysicsTools.NanoAODTools.postprocessing.framework.datamodel import Collection
from PhysicsTools.NanoAODTools.postprocessing.framework.eventloop import Module
from TreeProducer import *
from TreeProducerCommon import *
from CorrectionTools.PileupWeightTool import *
from CorrectionTools.BTaggingTool import BTagWeightTool, BTagWPs
from CorrectionTools.MuonSFs import *
from CorrectionTools.ElectronSFs import *
from CorrectionTools.RecoilCorrectionTool import getTTptWeight, getTTPt
from CorrectionTools.DYCorrection import *
import struct
import numpy as np
class LLProducer(Module):
def __init__(self, name, DataType, filelist, **kwargs):
self.name = name
self.out = TreeProducer(name)
self.sample = filelist
if DataType=='data':
self.isData = True
self.isMC = False
else:
self.isData = False
self.isMC = True
self.year = kwargs.get('year', 2017 )
self.tes = kwargs.get('tes', 1.0 )
self.ltf = kwargs.get('ltf', 1.0 )
self.jtf = kwargs.get('jtf', 1.0 )
year = self.year
self.filter = getMETFilters(year,self.isData)
if not self.isData:
self.muSFs = MuonSFs(year=year)
self.elSFs = ElectronSFs(year=year)
self.puTool = PileupWeightTool(year =year)
self.btagToolAK8_deep = BTagWeightTool('DeepCSV','AK8','loose',sigma='central',channel='ll',year=year)
self.btagToolAK8_deep_up = BTagWeightTool('DeepCSV','AK8','loose',sigma='up',channel='ll',year=year)
self.btagToolAK8_deep_down = BTagWeightTool('DeepCSV','AK8','loose',sigma='down',channel='ll',year=year)
self.btagToolAK4_deep = BTagWeightTool('DeepCSV','AK4','loose',sigma='central',channel='ll',year=year)
self.btagToolAK4_deep_up = BTagWeightTool('DeepCSV','AK4','loose',sigma='up',channel='ll',year=year)
self.btagToolAK4_deep_down = BTagWeightTool('DeepCSV','AK4','loose',sigma='down',channel='ll',year=year)
if 'DYJetsToLL' in self.sample[0]:
self.DYCorr = DYCorrection('DYJetsToLL')
elif 'ZJetsToNuNu' in self.sample[0]:
self.DYCorr = DYCorrection('ZJetsToNuNu')
elif 'WJetsToLNu' in self.sample[0]:
self.DYCorr = DYCorrection('WJetsToLNu')
self.runJEC = False
JEC_samples = ['Zprime','WWTo','WZTo','ZZTo','GluGluHToBB','ZH_HToBB','Wplus','Wminus']
for JEC_sample in JEC_samples:
if JEC_sample in self.sample[0]:
self.runJEC = True
def beginJob(self):
pass
def endJob(self):
if not self.isData:
self.btagToolAK8_deep.setDirectory(self.out.outputfile,'AK8btag_deep')
self.btagToolAK4_deep.setDirectory(self.out.outputfile,'AK4btag_deep')
self.out.outputfile.Write()
self.out.outputfile.Close()
def beginFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
pass
def endFile(self, inputFile, outputFile, inputTree, wrappedOutputTree):
pass
def fillBranches(self,event):
self.out.isMC[0] = self.isMC
self.out.is2016[0] = self.is2016
self.out.is2017[0] = self.is2017
self.out.is2018[0] = self.is2018
self.out.EventNumber[0] = event.event
self.out.LumiNumber[0] = event.luminosityBlock
self.out.RunNumber[0] = event.run
self.out.EventWeight[0] = self.EventWeight
self.out.TopWeight[0] = self.TopWeight
self.out.BTagAK8Weight[0] = self.BTagAK8Weight
self.out.BTagAK4Weight[0] = self.BTagAK4Weight
self.out.BTagAK8Weight_deep[0] = self.BTagAK8Weight_deep
self.out.BTagAK8Weight_deep_up[0] = self.BTagAK8Weight_deep_up
self.out.BTagAK8Weight_deep_down[0] = self.BTagAK8Weight_deep_down
self.out.BTagAK4Weight_deep[0] = self.BTagAK4Weight_deep
self.out.BTagAK4Weight_deep_up[0] = self.BTagAK4Weight_deep_up
self.out.BTagAK4Weight_deep_down[0] = self.BTagAK4Weight_deep_down
self.out.BBTagWeight[0] = self.BBTagWeight
self.out.GenWeight[0] = self.GenWeight
self.out.PUWeight[0] = self.PUWeight
self.out.LeptonWeight[0] = self.LeptonWeight
self.out.LeptonWeightUp[0] = self.LeptonWeightUp
self.out.LeptonWeightDown[0] = self.LeptonWeightDown
self.out.TriggerWeight[0] = self.TriggerWeight
self.out.TriggerWeightUp[0] = self.TriggerWeightUp
self.out.TriggerWeightDown[0] = self.TriggerWeightDown
self.out.QCDNLO_Corr[0] = self.QCDNLO_Corr
self.out.QCDNNLO_Corr[0] = self.QCDNNLO_Corr
self.out.EWKNLO_Corr[0] = self.EWKNLO_Corr
self.out.isZtoNN[0] = self.isZtoNN
self.out.isZtoEE[0] = self.isZtoEE
self.out.isZtoMM[0] = self.isZtoMM
self.out.isTtoEM[0] = self.isTtoEM
self.out.isBoosted4B[0] = self.isBoosted4B
self.out.isHtobb[0] = self.isHtobb
self.out.isHtobb_ml[0] = self.isHtobb_ml
self.out.isMaxBTag_loose[0] = self.isMaxBTag_loose
self.out.isMaxBTag_medium[0] = self.isMaxBTag_medium
self.out.isMaxBTag_tight[0] = self.isMaxBTag_tight
self.out.isVBF[0] = self.isVBF
self.out.nPV[0] = event.PV_npvsGood
self.out.nTaus[0] = self.nTaus
self.out.nElectrons[0] = self.nElectrons
self.out.nMuons[0] = self.nMuons
self.out.nJets[0] = self.nJetsNoFatJet
self.out.nFatJets[0] = self.nFatJets
self.out.DPhi[0] = self.DPhi
self.out.DEta[0] = self.VHDEta
self.out.MinDPhi[0] = self.MinJetMetDPhi
self.out.MaxBTag[0] = self.MaxJetNoFatJetBTag
self.out.BtagDeepB[0] = self.BtagDeepB
self.out.DeepTagMD_H4qvsQCD[0] = self.DeepTagMD_H4qvsQCD
self.out.DeepTagMD_HbbvsQCD[0] = self.DeepTagMD_HbbvsQCD
self.out.DeepTagMD_ZHbbvsQCD[0] = self.DeepTagMD_ZHbbvsQCD
self.out.DeepTagMD_ZbbvsQCD[0] = self.DeepTagMD_ZbbvsQCD
self.out.DeepTagMD_bbvsLight[0] = self.DeepTagMD_bbvsLight
self.out.DeepTagMD_WvsQCD[0] = self.DeepTagMD_WvsQCD
self.out.DeepTagMD_ZvsQCD[0] = self.DeepTagMD_ZvsQCD
self.out.Mu1_pt[0] = self.Mu1_pt
self.out.Mu1_eta[0] = self.Mu1_eta
self.out.Mu1_phi[0] = self.Mu1_phi
self.out.Mu1_mass[0] = self.Mu1_mass
self.out.Mu1_pfIsoId[0] = self.Mu1_pfIsoId
self.out.Mu1_relIso[0] = self.Mu1_relIso
self.out.Mu1_highPtId[0] = self.Mu1_highPtId
self.out.Mu2_pt[0] = self.Mu2_pt
self.out.Mu2_eta[0] = self.Mu2_eta
self.out.Mu2_phi[0] = self.Mu2_phi
self.out.Mu2_mass[0] = self.Mu2_mass
self.out.Mu2_pfIsoId[0] = self.Mu2_pfIsoId
self.out.Mu2_relIso[0] = self.Mu2_relIso
self.out.Mu2_highPtId[0] = self.Mu2_highPtId
self.out.Ele1_pt[0] = self.Ele1_pt
self.out.Ele1_eta[0] = self.Ele1_eta
self.out.Ele1_phi[0] = self.Ele1_phi
self.out.Ele1_mass[0] = self.Ele1_mass
self.out.Ele2_pt[0] = self.Ele2_pt
self.out.Ele2_eta[0] = self.Ele2_eta
self.out.Ele2_phi[0] = self.Ele2_phi
self.out.Ele2_mass[0] = self.Ele2_mass
self.out.Ele_HEM15_16[0] = self.Ele_HEM15_16
self.out.Jet1_VBF_pt[0] = self.Jet1_VBF_pt
self.out.Jet1_VBF_eta[0] = self.Jet1_VBF_eta
self.out.Jet1_VBF_phi[0] = self.Jet1_VBF_phi
self.out.Jet1_VBF_mass[0] = self.Jet1_VBF_mass
self.out.Jet2_VBF_pt[0] = self.Jet2_VBF_pt
self.out.Jet2_VBF_eta[0] = self.Jet2_VBF_eta
self.out.Jet2_VBF_phi[0] = self.Jet2_VBF_phi
self.out.Jet2_VBF_mass[0] = self.Jet2_VBF_mass
self.out.dijet_VBF_mass[0] = self.dijet_VBF_mass
self.out.deltaR_VBF[0] = self.deltaR_VBF
self.out.deltaR_HVBFjet1[0] = self.deltaR_HVBFjet1
self.out.deltaR_HVBFjet2[0] = self.deltaR_HVBFjet2
self.out.MET[0] = event.PuppiMET_pt
self.out.MET_chs[0] = event.MET_pt
self.out.HT_HEM15_16[0] = self.HT_HEM15_16
self.out.LHEScaleWeight = self.LHEScaleWeight
self.out.LHEPdfWeight = self.LHEPdfWeight
self.out.LHEWeight_originalXWGTUP[0] = self.LHEWeight_originalXWGTUP
self.out.PrefireWeight[0] = self.PrefireWeight
self.out.PrefireWeightUp[0] = self.PrefireWeightUp
self.out.PrefireWeightDown[0] = self.PrefireWeightDown
self.out.HT[0] = self.HT
self.out.H_pt[0] = self.H_pt
self.out.H_eta[0] = self.H_eta
self.out.H_phi[0] = self.H_phi
self.out.H_mass[0] = self.H_mass
self.out.H_M[0] = self.H_M
self.out.H_tau21[0] = self.H_tau21
self.out.H_tau41[0] = self.H_tau41
self.out.H_tau42[0] = self.H_tau42
self.out.H_tau31[0] = self.H_tau31
self.out.H_tau32[0] = self.H_tau32
self.out.H_ddt[0] = self.H_ddt
self.out.H_csv1[0] = self.H_csv1
self.out.H_csv2[0] = self.H_csv2
self.out.H_deepcsv1[0] = self.H_deepcsv1
self.out.H_deepcsv2[0] = self.H_deepcsv2
self.out.H_dbt[0] = self.H_dbt
self.out.H_hadronflavour[0] = self.H_hadronflavour
self.out.H_partonflavour[0] = self.H_partonflavour
self.out.H_chf[0] = self.H_chf
self.out.H_nhf[0] = self.H_nhf
self.out.V_pt[0] = self.V_pt
self.out.V_eta[0] = self.V_eta
self.out.V_phi[0] = self.V_phi
self.out.V_mass[0] = self.V_mass
self.out.VH_deltaR[0] = self.VH_deltaR
self.out.X_pt[0] = self.X_pt
self.out.X_eta[0] = self.X_eta
self.out.X_phi[0] = self.X_phi
self.out.X_mass[0] = self.X_mass
self.out.X_mass_chs[0] = self.X_mass_chs
self.out.X_mass_nom[0] = self.X_mass_nom
self.out.X_mass_jesUp[0] = self.X_mass_jesUp
self.out.X_mass_jesDown[0] = self.X_mass_jesDown
self.out.X_mass_jerUp[0] = self.X_mass_jerUp
self.out.X_mass_jerDown[0] = self.X_mass_jerDown
self.out.X_mass_MET_nom[0] = self.X_mass_MET_nom
self.out.X_mass_MET_jesUp[0] = self.X_mass_MET_jesUp
self.out.X_mass_MET_jesDown[0] = self.X_mass_MET_jesDown
self.out.X_mass_MET_jerUp[0] = self.X_mass_MET_jerUp
self.out.X_mass_MET_jerDown[0] = self.X_mass_MET_jerDown
self.out.H_mass_nom[0] = self.H_mass_nom
self.out.H_mass_jmsUp[0] = self.H_mass_jmsUp
self.out.H_mass_jmsDown[0] = self.H_mass_jmsDown
self.out.H_mass_jmrUp[0] = self.H_mass_jmrUp
self.out.H_mass_jmrDown[0] = self.H_mass_jmrDown
self.out.tree.Fill()
def analyze(self, event):
"""process event, return True (go to next module) or False (fail, go to next event)"""
##### set variables ####
self.nElectrons = 0
self.nMuons = 0
self.nTaus = 0
self.nFatJets = 0
self.EventWeight = 1.
self.TopWeight = 1.
self.BTagAK8Weight = 1.
self.BTagAK4Weight = 1.
self.BTagAK8Weight_deep = 1.
self.BTagAK8Weight_deep_up = 1.
self.BTagAK8Weight_deep_down = 1.
self.BTagAK4Weight_deep = 1.
self.BTagAK4Weight_deep_up = 1.
self.BTagAK4Weight_deep_down = 1.
self.BBTagWeight = 1.
self.GenWeight = 1.
self.PUWeight = 1.
self.LeptonWeight = 1.
self.LeptonWeightUp = 1.
self.LeptonWeightDown = 1.
self.TriggerWeight = 1.
self.TriggerWeightUp = 1.
self.TriggerWeightDown = 1.
self.isZtoMM = False
self.isZtoEE = False
self.isZtoNN = False
self.isTtoEM = False
self.isBoosted4B = False
self.isHtobb = False
self.isHtobb_ml = False
self.isMaxBTag_loose = False
self.isMaxBTag_medium = False
self.isMaxBTag_tight = False
self.isVBF = False
self.is2016 = False
self.is2017 = False
self.is2018 = False
self.nJetsNoFatJet = 0
self.H_partonflavour = -1.
self.H_hadronflavour = -1.
self.DPhi = -1.
self.VHDEta = -1.
self.MinJetMetDPhi = 10.
self.MaxJetNoFatJetBTag = -1.
self.BtagDeepB = -1.
self.DeepTagMD_H4qvsQCD = -1.
self.DeepTagMD_HbbvsQCD = -1.
self.DeepTagMD_ZHbbvsQCD = -1.
self.DeepTagMD_ZbbvsQCD = -1.
self.DeepTagMD_bbvsLight = -1.
self.DeepTagMD_WvsQCD = -1.
self.DeepTagMD_ZvsQCD = -1.
self.Mu1_pt = -1.
self.Mu1_eta = -1.
self.Mu1_phi = -1.
self.Mu1_mass = -1.
self.Mu1_pfIsoId = -1.
self.Mu1_relIso = -1.
self.Mu1_highPtId = -1.
self.Mu2_pt = -1.
self.Mu2_eta = -1.
self.Mu2_phi = -1.
self.Mu2_mass = -1.
self.Mu2_pfIsoId = -1.
self.Mu2_relIso = -1.
self.Mu2_highPtId = -1.
self.Ele1_pt = -1.
self.Ele1_eta = -1.
self.Ele1_phi = -1.
self.Ele1_mass = -1.
self.Ele2_pt = -1.
self.Ele2_eta = -1.
self.Ele2_phi = -1.
self.Ele2_mass = -1.
self.Ele_HEM15_16 = -1.
self.HT_HEM15_16 = -1.
self.HT = 0.
self.LHEScaleWeight = -1.
self.LHEPdfWeight = -1.
self.LHEWeight_originalXWGTUP = -1.
self.PrefireWeight = 1.
self.PrefireWeightUp = 1.
self.PrefireWeightDown = 1.
self.QCDNLO_Corr = 1.
self.QCDNNLO_Corr = 1.
self.EWKNLO_Corr = 1.
self.Jet1_VBF_pt = -1.
self.Jet1_VBF_eta = -1.
self.Jet1_VBF_phi = -1.
self.Jet1_VBF_mass = -1.
self.Jet2_VBF_pt = -1.
self.Jet2_VBF_eta = -1.
self.Jet2_VBF_phi = -1.
self.Jet2_VBF_mass = -1.
self.dijet_VBF_mass = -1.
self.deltaR_VBF = -1.
self.deltaR_HVBFjet1 = -1.
self.deltaR_HVBFjet2 = -1.
self.H_pt = -1.
self.H_eta = -1.
self.H_phi = -1.
self.H_mass = -1.
self.H_M = -1.
self.H_tau21 = -1.
self.H_tau41 = -1.
self.H_tau42 = -1.
self.H_tau31 = -1.
self.H_tau32 = -1.
self.H_ddt = -1.
self.H_csv1 = -1.
self.H_csv2 = -1.
self.H_deepcsv1 = -1.
self.H_deepcsv2 = -1.
self.H_dbt = -1.
self.H_chf = -1.
self.H_nhf = -1.
self.V_pt = -1.
self.V_eta = -1.
self.V_phi = -1.
self.V_mass = -1.
self.VH_deltaR = -1.
self.X_pt = -1.
self.X_eta = -1.
self.X_phi = -1.
self.X_mass = -1.
self.X_mass_chs = -1.
self.X_mass_nom = -1.
self.X_mass_jesUp = -1.
self.X_mass_jesDown = -1.
self.X_mass_jerUp = -1.
self.X_mass_jerDown = -1.
self.X_mass_MET_nom = -1.
self.X_mass_MET_jesUp = -1.
self.X_mass_MET_jesDown = -1.
self.X_mass_MET_jerUp = -1.
self.X_mass_MET_jerDown = -1.
self.H_mass_nom = -1.
self.H_mass_jmsUp = -1.
self.H_mass_jmsDown = -1.
self.H_mass_jmrUp = -1.
self.H_mass_jmrDown = -1.
eecutflow_list = []
mmcutflow_list = []
nncutflow_list = []
idx_electrons = []
idx_loose_electrons = []
idx_muons = []
idx_loose_muons = []
idx_fatjet = []
idx_jet = []
idx_jet_vbf = []
electrons_tlv_list = []
loose_electrons_tlv_list = []
muons_tlv_list = []
loose_muons_tlv_list = []
fatjet_tlv_list = []
jet_tlv_list = []
jet_tlv_list_vbf = []
fatjet_tau21_list = []
fatjet_tau41_list = []
fatjet_tau42_list = []
fatjet_tau31_list = []
fatjet_tau32_list = []
V = ROOT.TLorentzVector()
H = ROOT.TLorentzVector()
X = ROOT.TLorentzVector()
V_chs = ROOT.TLorentzVector()
######### cuts #########
elec1_pt_cut = 55.
elec2_pt_cut = 20.
elec_pt_cut = 10.
elec_eta_cut = 2.5
muon1_pt_cut = 55.
muon2_pt_cut = 20.
muon_pt_cut = 10.
muon_eta_cut = 2.4
tau_pt_cut = 18.
tau_eta_cut = 2.3
ak4_pt_cut = 30.
ak4_eta_cut = 2.4
fatjet_pt_cut = 200.
fatjet_eta_cut = 2.4
met_pt_cut = 250.
v_pt_cut = 200.
tau21_lowercut = 0.35
tau21_uppercut = 0.75
j_mass_lowercut = 30.
j_mass_uppercut = 250.
v_mass_lowercut = 65.
v_mass_intercut = 85.
v_mass_uppercut = 105.
h_mass_lowercut = 105.
h_mass_uppercut = 135.
x_mass_lowercut = 750.
xt_mass_lowercut = 650.
xjj_mass_lowercut = 950.
#### flag for year #######
if self.year == 2016:
self.is2016 = True
elif self.year == 2017:
self.is2017 = True
elif self.year == 2018:
self.is2018 = True
######### triggers #########
if self.year == 2016:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu50])
except:
trigger_SingleMu = event.HLT_Mu50
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele27_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon175
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight])
trigger_MET = any([event.HLT_PFMET170_NotCleaned,
event.HLT_PFMET170_HBHECleaned])
elif self.year == 2017:
try:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
except:
trigger_SingleMu = event.HLT_Mu50
try:
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
except:
trigger_SingleEle = None
trigger_SingleIsoEle = event.HLT_Ele35_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
try:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
except:
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
try:
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
except:
trigger_MET = None
elif self.year == 2018:
trigger_SingleMu = any([event.HLT_Mu50,
event.HLT_TkMu100,
event.HLT_OldMu100])
trigger_SingleEle = event.HLT_Ele115_CaloIdVT_GsfTrkIdT
trigger_SingleIsoEle = event.HLT_Ele32_WPTight_Gsf
trigger_SinglePhoton = event.HLT_Photon200
trigger_METMHTNoMu = any([event.HLT_PFMETNoMu110_PFMHTNoMu110_IDTight,
event.HLT_PFMETNoMu120_PFMHTNoMu120_IDTight,
event.HLT_PFMETNoMu130_PFMHTNoMu130_IDTight,
event.HLT_PFMETNoMu140_PFMHTNoMu140_IDTight,
event.HLT_MonoCentralPFJet80_PFMETNoMu120_PFMHTNoMu120_IDTight])
trigger_METMHT = any([event.HLT_PFMET110_PFMHT110_IDTight,
event.HLT_PFMET120_PFMHT120_IDTight,
event.HLT_PFMET130_PFMHT130_IDTight,
event.HLT_PFMET140_PFMHT140_IDTight,
event.HLT_PFMETTypeOne110_PFMHT110_IDTight,
event.HLT_PFMETTypeOne120_PFMHT120_IDTight,
event.HLT_PFMETTypeOne130_PFMHT130_IDTight,
event.HLT_PFMETTypeOne140_PFMHT140_IDTight])
trigger_MET = any([event.HLT_PFMET200_NotCleaned,
event.HLT_PFMET200_HBHECleaned,
event.HLT_PFMET200_HBHE_BeamHaloCleaned,
event.HLT_PFMET250_HBHECleaned])
########## Gen Weight #########
if self.isMC:
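# keep only the sign of the generator weight (NLO samples produce negative-weight events); the magnitude is absorbed in the sample normalization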
self.GenWeight = -1. if event.genWeight < 0 else 1.
self.PUWeight = self.puTool.getWeight(event.Pileup_nTrueInt)
self.EventWeight *= self.GenWeight
self.EventWeight *= self.PUWeight
for i,weight in enumerate(event.LHEScaleWeight):
self.out.LHEScaleWeight_hist.Fill(i,weight)
for j,weight in enumerate(event.LHEPdfWeight):
self.out.LHEPdfWeight_hist.Fill(j,weight)
self.LHEScaleWeight = event.LHEScaleWeight
self.LHEPdfWeight = event.LHEPdfWeight
self.LHEWeight_originalXWGTUP = event.LHEWeight_originalXWGTUP
self.out.events.Fill(0.,self.GenWeight)
self.out.original.Fill(0.,event.LHEWeight_originalXWGTUP)
if self.year == 2016 or self.year == 2017:
self.PrefireWeight = event.PrefireWeight
self.PrefireWeightUp = event.PrefireWeight_Up
self.PrefireWeightDown = event.PrefireWeight_Down
if self.isData and event.PV_npvs == 0:
return False
if not self.isData:
self.out.pileup.Fill(event.Pileup_nTrueInt)
if event.Pileup_nTrueInt == 0:
return False
########### FatJet #########
for ifatjet in range(event.nFatJet):
fatjet_pt = event.FatJet_pt[ifatjet]
fatjet_eta = event.FatJet_eta[ifatjet]
fatjet_phi = event.FatJet_phi[ifatjet]
fatjet_mass = event.FatJet_mass[ifatjet]
fatjet_jetid = event.FatJet_jetId[ifatjet]
fatjet_tlv = ROOT.TLorentzVector()
fatjet_tlv.SetPtEtaPhiM(fatjet_pt, fatjet_eta, fatjet_phi, fatjet_mass)
if fatjet_pt > fatjet_pt_cut and abs(fatjet_eta) < fatjet_eta_cut:
fatjet_tlv_list.append(fatjet_tlv)
idx_fatjet.append(ifatjet)
if event.FatJet_tau1[ifatjet]==0:
fatjet_tau21_list.append(0)
fatjet_tau41_list.append(0)
fatjet_tau31_list.append(0)
else:
fatjet_tau21_list.append(event.FatJet_tau2[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau41_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau1[ifatjet])
fatjet_tau31_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau1[ifatjet])
if event.FatJet_tau2[ifatjet]==0:
fatjet_tau42_list.append(0)
fatjet_tau32_list.append(0)
else:
fatjet_tau42_list.append(event.FatJet_tau4[ifatjet]/event.FatJet_tau2[ifatjet])
fatjet_tau32_list.append(event.FatJet_tau3[ifatjet]/event.FatJet_tau2[ifatjet])
self.nFatJets = len(fatjet_tlv_list)
#stop if no suitable Fatjet
if len(fatjet_tlv_list) == 0:
return False
########### electrons ##########
for ielectron in range(event.nElectron):
electron_pt = event.Electron_pt[ielectron]
electron_eta = event.Electron_eta[ielectron]
electron_phi = event.Electron_phi[ielectron]
electron_mass = event.Electron_mass[ielectron]
electron_tlv = ROOT.TLorentzVector()
electron_tlv.SetPtEtaPhiM(electron_pt,electron_eta,electron_phi,electron_mass)
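# sum the pT of electrons pointing into the HEM15/16 region (2018 HCAL endcap failure): -2.5 < eta < -1.479, -1.55 < phi < -0.9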
if electron_eta > -2.5 and electron_eta < -1.479 and electron_phi > -1.55 and electron_phi < -0.9:
if self.Ele_HEM15_16 == -1.:
self.Ele_HEM15_16 = 0.
self.Ele_HEM15_16 += electron_pt
if electron_pt > elec_pt_cut and abs(electron_eta) < elec_eta_cut:
idx_electrons.append(ielectron)
electrons_tlv_list.append(electron_tlv)
if event.Electron_cutBased[ielectron] >= 2:
idx_loose_electrons.append(ielectron)
loose_electrons_tlv_list.append(electron_tlv)
self.nElectrons = len(loose_electrons_tlv_list)
########### muons #########
for imuon in range(event.nMuon):
muon_pt = event.Muon_pt[imuon]
muon_eta = event.Muon_eta[imuon]
muon_phi = event.Muon_phi[imuon]
muon_mass = event.Muon_mass[imuon]
muon_tlv = ROOT.TLorentzVector()
muon_tlv.SetPtEtaPhiM(muon_pt, muon_eta, muon_phi, muon_mass)
if muon_pt > muon_pt_cut and abs(muon_eta) < muon_eta_cut:
idx_muons.append(imuon)
muons_tlv_list.append(muon_tlv)
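# loose muon: PF candidate, pfIsoId >= 2 (loose PF isolation), and global or tracker muon; pfIsoId is stored as a byte in NanoAOD, hence struct.unpack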
if event.Muon_isPFcand[imuon] and struct.unpack('B',event.Muon_pfIsoId[imuon])[0]>=2 and (event.Muon_isGlobal[imuon] or event.Muon_isTracker[imuon]):
idx_loose_muons.append(imuon)
loose_muons_tlv_list.append(muon_tlv)
self.nMuons = len(loose_muons_tlv_list)
############ taus #########
for itau in range(event.nTau):
tau_pt = event.Tau_pt[itau]
tau_eta = event.Tau_eta[itau]
tau_phi = event.Tau_phi[itau]
tau_mass = event.Tau_mass[itau]
tau_tlv = ROOT.TLorentzVector()
tau_tlv.SetPtEtaPhiM(tau_pt, tau_eta, tau_phi, tau_mass)
if tau_pt > tau_pt_cut and abs(tau_eta) < tau_eta_cut:
cleanTau = True
for loose_electrons_tlv in loose_electrons_tlv_list:
if loose_electrons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
for loose_muons_tlv in loose_muons_tlv_list:
if loose_muons_tlv.DeltaR(tau_tlv) < 0.4:
cleanTau = False
if cleanTau:
self.nTaus += 1
############ MET ##########
METx = 0.
METy = 0.
MET_tlv = ROOT.TLorentzVector()
MET_tlv.SetPtEtaPhiE(event.PuppiMET_pt,0.,event.PuppiMET_phi, event.PuppiMET_pt)
############ TTbar pT reweighting ########
if self.isMC and 'TT' in self.sample[0]:
Top1_pt, Top2_pt = getTTPt(event)
self.TopWeight = getTTptWeight(Top1_pt, Top2_pt)
############ ZtoEE ############
self.out.eecutflow.Fill(0.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
maxZpt = -1.
Z_pt = -1.
Z_m = -1.
goodelectronpair = False
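# build opposite-charge dielectron candidates; keep the highest-pT pair with 70 < m(ee) < 110 GeV as the Z candidate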
for i in idx_electrons:
for j in idx_electrons:
if i==j or event.Electron_charge[i] == event.Electron_charge[j]:
continue
eli_tlv = ROOT.TLorentzVector()
eli_tlv.SetPtEtaPhiM(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i],event.Electron_mass[i])
eli_v = ROOT.TVector3()
eli_v.SetPtEtaPhi(event.Electron_pt[i],event.Electron_eta[i],event.Electron_phi[i])
elj_tlv = ROOT.TLorentzVector()
elj_tlv.SetPtEtaPhiM(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j],event.Electron_mass[j])
elj_v = ROOT.TVector3()
elj_v.SetPtEtaPhi(event.Electron_pt[j],event.Electron_eta[j],event.Electron_phi[j])
diel = eli_tlv + elj_tlv
Z_pt = diel.Pt()
Z_m = diel.M()
if Z_m > 70. and Z_m < 110. and Z_pt > maxZpt:
maxZpt = Z_pt
if eli_tlv.Pt() > elj_tlv.Pt():
el1 = i
el2 = j
el1_tlv = eli_tlv
el2_tlv = elj_tlv
el1_v = eli_v
el2_v = elj_v
else:
el1 = j
el2 = i
el1_tlv = elj_tlv
el2_tlv = eli_tlv
el1_v = elj_v
el2_v = eli_v
goodelectronpair = True
if goodelectronpair:
self.out.eecutflow.Fill(1.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if el1_tlv.Pt() > elec1_pt_cut and el2_tlv.Pt() > elec2_pt_cut:
self.out.eecutflow.Fill(2.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if event.Electron_cutBased[el1] >= 2 and event.Electron_cutBased[el2] >= 2:
self.out.eecutflow.Fill(3.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if maxZpt > v_pt_cut:
self.out.eecutflow.Fill(4.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if trigger_SingleEle is None:
if not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
else:
if not trigger_SingleEle and not trigger_SingleIsoEle and not trigger_SinglePhoton:
print "ZtoEE trigger inconsistency"
return False
#if not self.isMC and ("SinglePhoton" in self.sample[0] and (trigger_SingleEle or trigger_SingleIsoEle)):
# print "ZtoEE double counting"
# return False
self.out.eecutflow.Fill(5.,self.EventWeight)
eecutflow_list.append(self.EventWeight)
if self.isMC:
eltrig_tlv = el1_tlv
#for i in range(event.nTrigObj):
# if event.TrigObj_id[i] ==11:
# trigobj_v = ROOT.TVector3()
# trigobj_v.SetPtEtaPhi(event.TrigObj_pt[i],event.TrigObj_eta[i],event.TrigObj_phi[i])
# print "electron TrigObj_filterBits:",event.TrigObj_filterBits[i]
# if event.TrigObj_filterBits[i]==14336:
# #if event.TrigObj_filterBits[i]==1110000000000000:
# print "found matching electron"
# deltaR1 = trigobj_v.DeltaR(el1_v)
# deltaR2 = trigobj_v.DeltaR(el2_v)
# if deltaR2 < deltaR1 and deltaR2 < 0.2:
# eltrig_tlv = el2_tlv
# break
self.TriggerWeight = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightUp = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) + self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.TriggerWeightDown = self.elSFs.getTriggerSF(eltrig_tlv.Pt(),eltrig_tlv.Eta()) - self.elSFs.getTriggerSFerror(eltrig_tlv.Pt(),eltrig_tlv.Eta())
self.LeptonWeight = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())*self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1 = self.elSFs.getIdIsoSF(el1_tlv.Pt(), el1_tlv.Eta())
IdIsoSF2 = self.elSFs.getIdIsoSF(el2_tlv.Pt(),el2_tlv.Eta())
IdIsoSF1error = self.elSFs.getIdIsoSFerror(el1_tlv.Pt(), el1_tlv.Eta())
IdIsoSF2error = self.elSFs.getIdIsoSFerror(el2_tlv.Pt(),el2_tlv.Eta())
self.LeptonWeight = IdIsoSF1*IdIsoSF2
LeptonWeightsigma = np.sqrt((IdIsoSF1error*IdIsoSF2)**2+(IdIsoSF2error*IdIsoSF1)**2)
self.LeptonWeightUp = self.LeptonWeight + LeptonWeightsigma
self.LeptonWeightDown = self.LeptonWeight - LeptonWeightsigma
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
V = el1_tlv + el2_tlv
self.Ele1_pt = el1_tlv.Pt()
self.Ele1_eta = el1_tlv.Eta()
self.Ele1_phi = el1_tlv.Phi()
self.Ele1_mass = el1_tlv.M()
self.Ele2_pt = el2_tlv.Pt()
self.Ele2_eta = el2_tlv.Eta()
self.Ele2_phi = el2_tlv.Phi()
self.Ele2_mass = el2_tlv.M()
self.isZtoEE = True
########## ZtoMM #############
self.out.mmcutflow.Fill(0.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
maxZpt = -1.
Z_pt = -1.
Z_m = -1.
goodmuonpair = False
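# build opposite-charge dimuon candidates; keep the highest-pT pair with 70 < m(mumu) < 110 GeV as the Z candidate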
for i in idx_muons:
for j in idx_muons:
if i==j or event.Muon_charge[i] == event.Muon_charge[j]:
continue
mui_tlv = ROOT.TLorentzVector()
mui_tlv.SetPtEtaPhiM(event.Muon_pt[i],event.Muon_eta[i],event.Muon_phi[i],event.Muon_mass[i])
mui_v = ROOT.TVector3()
mui_v.SetPtEtaPhi(event.Muon_pt[i],event.Muon_eta[i],event.Muon_phi[i])
muj_tlv = ROOT.TLorentzVector()
muj_tlv.SetPtEtaPhiM(event.Muon_pt[j],event.Muon_eta[j],event.Muon_phi[j],event.Muon_mass[j])
muj_v = ROOT.TVector3()
muj_v.SetPtEtaPhi(event.Muon_pt[j],event.Muon_eta[j],event.Muon_phi[j])
dimu = mui_tlv + muj_tlv
Z_pt = dimu.Pt()
Z_m = dimu.M()
if Z_m > 70. and Z_m < 110. and Z_pt > maxZpt:
maxZpt = Z_pt
if mui_tlv.Pt() > muj_tlv.Pt():
mu1 = i
mu2 = j
mu1_tlv = mui_tlv
mu2_tlv = muj_tlv
mu1_v = mui_v
mu2_v = muj_v
else:
mu1 = j
mu2 = i
mu1_tlv = muj_tlv
mu2_tlv = mui_tlv
mu1_v = muj_v
mu2_v = mui_v
goodmuonpair = True
if goodmuonpair:
self.out.mmcutflow.Fill(1.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
mu1_highPtId = struct.unpack('B',event.Muon_highPtId[mu1])[0]
mu2_highPtId = struct.unpack('B',event.Muon_highPtId[mu2])[0]
if mu1_tlv.Pt() > muon1_pt_cut and mu2_tlv.Pt() > muon2_pt_cut:
self.out.mmcutflow.Fill(2.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if (mu1_highPtId >= 2 and mu2_highPtId >= 1) or (mu1_highPtId >= 1 and mu2_highPtId >= 2):
self.out.mmcutflow.Fill(3.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if maxZpt > v_pt_cut:
self.out.mmcutflow.Fill(4.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if not trigger_SingleMu:
print "ZtoMM trigger inconsistency"
return False
self.out.mmcutflow.Fill(5.,self.EventWeight)
mmcutflow_list.append(self.EventWeight)
if self.isMC:
if mu1_highPtId >=2:
mutrig_tlv = mu1_tlv
else:
mutrig_tlv = mu2_tlv
#for i in range(event.nTrigObj):
# if event.TrigObj_id[i] ==13:
# trigobj_v = ROOT.TVector3()
# trigobj_v.SetPtEtaPhi(event.TrigObj_pt[i],event.TrigObj_eta[i],event.TrigObj_phi[i])
# deltaR1 = trigobj_v.DeltaR(mu1_v)
# deltaR2 = trigobj_v.DeltaR(mu2_v)
# print "muon TrigObj_filterBits:",event.TrigObj_filterBits[i]
# if event.TrigObj_filterBits[i]==2048:
# #if event.TrigObj_filterBits[i]==10000000000:
# print "found matching muon"
# if deltaR2 < deltaR1 and deltaR2 < 0.2:
# mutrig_tlv = mu2_tlv
# break
self.TriggerWeight = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta())
self.TriggerWeightUp = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta()) + self.muSFs.getTriggerSFerror(mutrig_tlv.Pt(),mutrig_tlv.Eta())
self.TriggerWeightDown = self.muSFs.getTriggerSF(mutrig_tlv.Pt(),mutrig_tlv.Eta()) - self.muSFs.getTriggerSFerror(mutrig_tlv.Pt(),mutrig_tlv.Eta())
IdSF1 = self.muSFs.getIdSF(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IdSF2 = self.muSFs.getIdSF(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IsoSF1 = self.muSFs.getIsoSF(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IsoSF2 = self.muSFs.getIsoSF(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IdSF1error = self.muSFs.getIdSFerror(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IdSF2error = self.muSFs.getIdSFerror(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
IsoSF1error = self.muSFs.getIsoSFerror(mu1_tlv.Pt(),mu1_tlv.Eta(),mu1_highPtId)
IsoSF2error = self.muSFs.getIsoSFerror(mu2_tlv.Pt(),mu2_tlv.Eta(),mu2_highPtId)
self.LeptonWeight = IdSF1*IdSF2*IsoSF1*IsoSF2
LeptonWeightsigma = np.sqrt((IdSF1error*IdSF2*IsoSF1*IsoSF2)**2+(IdSF2error*IdSF1*IsoSF1*IsoSF2)**2+(IsoSF1error*IdSF1*IdSF2*IsoSF2)**2+(IsoSF2error*IdSF1*IdSF2*IsoSF1)**2)
self.LeptonWeightUp = self.LeptonWeight + LeptonWeightsigma
self.LeptonWeightDown = self.LeptonWeight - LeptonWeightsigma
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
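# for overlapping muons (dR < 0.3) each muon's tracker isolation cone contains the other's track: subtract the partner pT before normalizing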
if mu1_tlv.DeltaR(mu2_tlv) < 0.3:
try:
self.Mu1_relIso = ((event.Muon_tkRelIso[mu1]*mu1_tlv.Pt()) - mu2_tlv.Pt())/mu1_tlv.Pt()
self.Mu2_relIso = ((event.Muon_tkRelIso[mu2]*mu2_tlv.Pt()) - mu1_tlv.Pt())/mu2_tlv.Pt()
except:
self.Mu1_relIso = -1.
self.Mu2_relIso = -1.
else:
try:
self.Mu1_relIso = event.Muon_tkRelIso[mu1]
self.Mu2_relIso = event.Muon_tkRelIso[mu2]
except:
self.Mu1_relIso = -1.
self.Mu2_relIso = -1.
V = mu1_tlv + mu2_tlv
self.Mu1_pt = mu1_tlv.Pt()
self.Mu1_eta = mu1_tlv.Eta()
self.Mu1_phi = mu1_tlv.Phi()
self.Mu1_mass = mu1_tlv.M()
self.Mu1_pfIsoId = struct.unpack('B',event.Muon_pfIsoId[mu1])[0]
self.Mu1_highPtId = struct.unpack('B',event.Muon_highPtId[mu1])[0]
self.Mu2_pt = mu2_tlv.Pt()
self.Mu2_eta = mu2_tlv.Eta()
self.Mu2_phi = mu2_tlv.Phi()
self.Mu2_mass = mu2_tlv.M()
self.Mu2_pfIsoId = struct.unpack('B',event.Muon_pfIsoId[mu2])[0]
self.Mu2_highPtId = struct.unpack('B',event.Muon_highPtId[mu2])[0]
self.isZtoMM = True
########### TtoEM #########
if not self.isZtoMM and not self.isZtoEE and self.nElectrons == 1 and self.nMuons == 1:
if event.Electron_charge[idx_loose_electrons[0]] != event.Muon_charge[idx_loose_muons[0]]:
el_tlv = loose_electrons_tlv_list[0]
mu_tlv = loose_muons_tlv_list[0]
if mu_tlv.Pt() > 30. and el_tlv.Pt() > 30.:
V = mu_tlv + el_tlv
if V.Pt() > 50.:
if trigger_SingleEle is None:
if not trigger_SingleIsoEle:
print "TtoEM trigger inconsistency"
return False
else:
if not trigger_SingleEle and not trigger_SingleIsoEle:
print "TtoEM trigger inconsistency"
return False
if self.isMC:
self.TriggerWeight = self.elSFs.getTriggerSF(el_tlv.Pt(),el_tlv.Eta())
self.LeptonWeight = self.elSFs.getIdIsoSF(el_tlv.Pt(), el_tlv.Eta())
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.EventWeight *= self.TriggerWeight
self.EventWeight *= self.LeptonWeight
self.Mu1_pt = mu_tlv.Pt()
self.Mu1_eta = mu_tlv.Eta()
self.Mu1_phi = mu_tlv.Phi()
self.Mu1_mass = mu_tlv.M()
self.Ele1_pt = el_tlv.Pt()
self.Ele1_eta = el_tlv.Eta()
self.Ele1_phi = el_tlv.Phi()
self.Ele1_mass = el_tlv.M()
self.isTtoEM = True
######### ZtoNN ##########
self.out.nncutflow.Fill(0.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if not self.isZtoMM and not self.isZtoEE and not self.isTtoEM:
if event.PuppiMET_pt > met_pt_cut :
self.out.nncutflow.Fill(1.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.nElectrons == 0 and self.nMuons == 0 and self.nTaus == 0:
self.out.nncutflow.Fill(2.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
V.SetPtEtaPhiE(event.PuppiMET_pt,0.,event.PuppiMET_phi,event.PuppiMET_pt)
V_chs.SetPtEtaPhiE(event.MET_pt,0.,event.MET_phi,event.MET_pt)
if trigger_MET is None:
if not self.isMC and not trigger_METMHT and not trigger_METMHTNoMu:
print "ZtoNN Trigger inconsistency"
return False
else:
if not self.isMC and not trigger_MET and not trigger_METMHT and not trigger_METMHTNoMu:
print "ZtoNN Trigger inconsistency"
return False
self.out.nncutflow.Fill(3.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.filter(event) == False:
print "Bad event"
return False
self.out.nncutflow.Fill(4.,self.EventWeight)
nncutflow_list.append(self.EventWeight)
if self.isMC:
if 'DYJetsToLL' in self.sample[0] or 'ZJetsToNuNu' in self.sample[0] or 'WJetsToLNu' in self.sample[0]:
GenVpt = getGenVpt(event)
self.QCDNLO_Corr = self.DYCorr.getWeightQCDNLO(GenVpt)
self.QCDNNLO_Corr = self.DYCorr.getWeightQCDNNLO(GenVpt)
self.EWKNLO_Corr = self.DYCorr.getWeightEWKNLO(GenVpt)
self.EventWeight *= self.QCDNLO_Corr * self.QCDNNLO_Corr * self.EWKNLO_Corr
self.TriggerWeight = 1.
self.isZtoNN = True
#stop if the event falls into none of the selection channels
if not (self.isZtoEE or self.isZtoMM or self.isZtoNN or self.isTtoEM):
return False
########## setting the Higgs and V index #######
fatjet_idx_H = 0
valid_Higgs = False
if self.isZtoMM:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.DeltaR(mu1_tlv)>0.8 and fatjet_tlv.DeltaR(mu2_tlv)>0.8 and fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
valid_Higgs = True
if not valid_Higgs:
return False
elif self.isZtoEE:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.DeltaR(el1_tlv)>0.8 and fatjet_tlv.DeltaR(el2_tlv)>0.8 and fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
valid_Higgs = True
if not valid_Higgs:
return False
elif self.isZtoNN:
fatjet_maxpt = 0.
for i,fatjet_tlv in enumerate(fatjet_tlv_list):
if fatjet_tlv.Pt()>fatjet_maxpt:
fatjet_maxpt=fatjet_tlv.Pt()
fatjet_idx_H = i
############ AK4 Jet ###########
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
self.HT += jet_pt
if jet_eta > -2.5 and jet_eta < -1.479 and jet_phi > -1.55 and jet_phi < -0.9:
if self.HT_HEM15_16 == -1.:
self.HT_HEM15_16 = 0.
self.HT_HEM15_16 += jet_pt
if jet_pt > ak4_pt_cut and abs(jet_eta) < ak4_eta_cut:
cleanJet = True
for loose_electrons_tlv in loose_electrons_tlv_list:
if loose_electrons_tlv.DeltaR(jet_tlv) < 0.4:
cleanJet = False
for loose_muons_tlv in loose_muons_tlv_list:
if loose_muons_tlv.DeltaR(jet_tlv) < 0.4:
cleanJet = False
if cleanJet and getJetID(self.year,event,ijet):
if len(fatjet_tlv_list) > 0 and fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv) > 1.2:
jet_tlv_list.append(jet_tlv)
idx_jet.append(ijet)
############ AK4 Jet check for VBF ###########
if self.isZtoMM:
lep1_tlv = mu1_tlv
lep2_tlv = mu2_tlv
if self.isZtoEE:
lep1_tlv = el1_tlv
lep2_tlv = el2_tlv
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
if abs(jet_eta) < 5.0:
if len(fatjet_tlv_list) > 0:
if fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv) > 1.2:
if getJetID(self.year,event,ijet) and event.Jet_puId[ijet]==7:
if self.isZtoMM or self.isZtoEE:
if jet_tlv.DeltaR(lep1_tlv)>0.4 and jet_tlv.DeltaR(lep2_tlv)>0.4:
jet_tlv_list_vbf.append(jet_tlv)
idx_jet_vbf.append(ijet)
elif self.isZtoNN:
jet_tlv_list_vbf.append(jet_tlv)
idx_jet_vbf.append(ijet)
idx1_vbf = -1
idx2_vbf = -1
maxVBFmass = -1.
for ijet1, jet1_tlv in enumerate(jet_tlv_list_vbf):
for ijet2, jet2_tlv in enumerate(jet_tlv_list_vbf):
if ijet1 == ijet2: continue
eta1 = jet_tlv_list_vbf[ijet1].Eta()
eta2 = jet_tlv_list_vbf[ijet2].Eta()
V_VBF = jet_tlv_list_vbf[ijet1]+jet_tlv_list_vbf[ijet2]
VBFmass = V_VBF.M()
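# VBF topology: jets in opposite hemispheres (eta1*eta2 < 0) with a large rapidity gap (|deta| > 4); keep the pair with the largest dijet mass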
if abs(eta1-eta2)>4.0 and eta1*eta2<0. and VBFmass>maxVBFmass:
idx1_vbf = ijet1
idx2_vbf = ijet2
maxVBFmass = VBFmass
self.dijet_VBF_mass = maxVBFmass
if maxVBFmass > 500.:
self.isVBF = True
self.Jet1_VBF_pt = jet_tlv_list_vbf[idx1_vbf].Pt()
self.Jet1_VBF_eta = jet_tlv_list_vbf[idx1_vbf].Eta()
self.Jet1_VBF_phi = jet_tlv_list_vbf[idx1_vbf].Phi()
self.Jet1_VBF_mass = jet_tlv_list_vbf[idx1_vbf].M()
self.Jet2_VBF_pt = jet_tlv_list_vbf[idx2_vbf].Pt()
self.Jet2_VBF_eta = jet_tlv_list_vbf[idx2_vbf].Eta()
self.Jet2_VBF_phi = jet_tlv_list_vbf[idx2_vbf].Phi()
self.Jet2_VBF_mass = jet_tlv_list_vbf[idx2_vbf].M()
self.deltaR_VBF = jet_tlv_list_vbf[idx1_vbf].DeltaR(jet_tlv_list_vbf[idx2_vbf])
self.deltaR_HVBFjet1 = (fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv_list_vbf[idx1_vbf]))
self.deltaR_HVBFjet2 = (fatjet_tlv_list[fatjet_idx_H].DeltaR(jet_tlv_list_vbf[idx2_vbf]))
########## Higgs ########
H = fatjet_tlv_list[fatjet_idx_H]
if self.runJEC:
self.H_mass_nom = event.FatJet_msoftdrop_nom[fatjet_idx_H]
self.H_mass_jmsUp = event.FatJet_msoftdrop_jmsUp[fatjet_idx_H]
self.H_mass_jmsDown = event.FatJet_msoftdrop_jmsDown[fatjet_idx_H]
self.H_mass_jmrUp = event.FatJet_msoftdrop_jmrUp[fatjet_idx_H]
self.H_mass_jmrDown = event.FatJet_msoftdrop_jmrDown[fatjet_idx_H]
self.H_pt_nom = event.FatJet_pt_nom[fatjet_idx_H]
self.H_pt_jesUp = event.FatJet_pt_jesTotalUp[fatjet_idx_H]
self.H_pt_jesDown = event.FatJet_pt_jesTotalDown[fatjet_idx_H]
self.H_pt_jerUp = event.FatJet_pt_jerUp[fatjet_idx_H]
self.H_pt_jerDown = event.FatJet_pt_jerDown[fatjet_idx_H]
self.PuppiMET_pt_nom = event.PuppiMET_pt_nom
self.PuppiMET_pt_jesUp = event.PuppiMET_pt_jesTotalUp
self.PuppiMET_pt_jesDown = event.PuppiMET_pt_jesTotalDown
self.PuppiMET_pt_jerUp = event.PuppiMET_pt_jerUp
self.PuppiMET_pt_jerDown = event.PuppiMET_pt_jerDown
H_Eta = H.Eta()
H_Phi = H.Phi()
H_M = H.M()
H_nom = ROOT.TLorentzVector()
H_jesUp = ROOT.TLorentzVector()
H_jesDown = ROOT.TLorentzVector()
H_jerUp = ROOT.TLorentzVector()
H_jerDown = ROOT.TLorentzVector()
H_nom.SetPtEtaPhiM(self.H_pt_nom,H_Eta,H_Phi,H_M)
H_jesUp.SetPtEtaPhiM(self.H_pt_jesUp,H_Eta,H_Phi,H_M)
H_jesDown.SetPtEtaPhiM(self.H_pt_jesDown,H_Eta,H_Phi,H_M)
H_jerUp.SetPtEtaPhiM(self.H_pt_jerUp,H_Eta,H_Phi,H_M)
H_jerDown.SetPtEtaPhiM(self.H_pt_jerDown,H_Eta,H_Phi,H_M)
MET_nom = ROOT.TLorentzVector()
MET_jesUp = ROOT.TLorentzVector()
MET_jesDown = ROOT.TLorentzVector()
MET_jerUp = ROOT.TLorentzVector()
MET_jerDown = ROOT.TLorentzVector()
MET_nom.SetPtEtaPhiM(self.PuppiMET_pt_nom,0.,event.PuppiMET_phi,self.PuppiMET_pt_nom)
MET_jesUp.SetPtEtaPhiM(self.PuppiMET_pt_jesUp,0.,event.PuppiMET_phi,self.PuppiMET_pt_jesUp)
MET_jesDown.SetPtEtaPhiM(self.PuppiMET_pt_jesDown,0.,event.PuppiMET_phi,self.PuppiMET_pt_jesDown)
MET_jerUp.SetPtEtaPhiM(self.PuppiMET_pt_jerUp,0.,event.PuppiMET_phi,self.PuppiMET_pt_jerUp)
MET_jerDown.SetPtEtaPhiM(self.PuppiMET_pt_jerDown,0.,event.PuppiMET_phi,self.PuppiMET_pt_jerDown)
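# flag the event as a boosted 4b candidate if any selected AK8 jet passes the double-b tagger threshold (btagHbb > 0.3)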
for ifatjet in idx_fatjet:
if event.FatJet_btagHbb[ifatjet] > 0.3:
self.isBoosted4B = True
self.nJetsNoFatJet = len(jet_tlv_list)
if self.isZtoNN:
self.DPhi = abs(MET_tlv.DeltaPhi(H))
else:
self.DPhi = abs(V.DeltaPhi(H))
self.VH_deltaR = H.DeltaR(V)
jet_list_temp = []
for ijet in range(event.nJet):
jet_pt = event.Jet_pt[ijet]
jet_eta = event.Jet_eta[ijet]
jet_phi = event.Jet_phi[ijet]
jet_mass = event.Jet_mass[ijet]
jet_tlv = ROOT.TLorentzVector()
jet_tlv.SetPtEtaPhiM(jet_pt,jet_eta,jet_phi,jet_mass)
if jet_tlv.DeltaR(H) < 0.8:
jet_list_temp.append(ijet)
if len(jet_list_temp) == 1:
idx = jet_list_temp[0]
self.H_chf = event.Jet_chHEF[idx]
self.H_nhf = event.Jet_neHEF[idx]
elif len(jet_list_temp) == 2:
idx1 = jet_list_temp[0]
idx2 = jet_list_temp[1]
pt1 = event.Jet_pt[idx1]
pt2 = event.Jet_pt[idx2]
chf1 = event.Jet_chHEF[idx1]
chf2 = event.Jet_chHEF[idx2]
nhf1 = event.Jet_neHEF[idx1]
nhf2 = event.Jet_neHEF[idx2]
self.H_chf = (chf1*pt1+chf2*pt2)/(pt1+pt2)
self.H_nhf = (nhf1*pt1+nhf2*pt2)/(pt1+pt2)
elif len(jet_list_temp) == 3:
idx1 = jet_list_temp[0]
idx2 = jet_list_temp[1]
idx3 = jet_list_temp[2]
pt1 = event.Jet_pt[idx1]
pt2 = event.Jet_pt[idx2]
pt3 = event.Jet_pt[idx3]
chf1 = event.Jet_chHEF[idx1]
chf2 = event.Jet_chHEF[idx2]
chf3 = event.Jet_chHEF[idx3]
nhf1 = event.Jet_neHEF[idx1]
nhf2 = event.Jet_neHEF[idx2]
nhf3 = event.Jet_neHEF[idx3]
self.H_chf = (chf1*pt1+chf2*pt2+chf3*pt3)/(pt1+pt2+pt3)
self.H_nhf = (nhf1*pt1+nhf2*pt2+nhf3*pt3)/(pt1+pt2+pt3)
for jet_tlv in jet_tlv_list:
if abs(MET_tlv.DeltaPhi(jet_tlv)) < self.MinJetMetDPhi:
self.MinJetMetDPhi = abs(MET_tlv.DeltaPhi(jet_tlv))
for ijet in idx_jet:
if event.Jet_btagDeepB[ijet] > self.MaxJetNoFatJetBTag:
self.MaxJetNoFatJetBTag = event.Jet_btagDeepB[ijet]
if not self.isData:
for igenjet in range(event.nGenJetAK8):
genjetAK8_tlv = ROOT.TLorentzVector()
genjetAK8_tlv.SetPtEtaPhiM(event.GenJetAK8_pt[igenjet], event.GenJetAK8_eta[igenjet], event.GenJetAK8_phi[igenjet], event.GenJetAK8_mass[igenjet])
if H.DeltaR(genjetAK8_tlv) < 0.8:
self.H_hadronflavour = struct.unpack('B',event.GenJetAK8_hadronFlavour[igenjet])[0]
self.H_partonflavour = event.GenJetAK8_partonFlavour[igenjet]
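# fill DeepCSV efficiency maps and compute per-event AK4 b-tag weights (central and +/-1 sigma), excluding jets overlapping the Higgs candidate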
self.btagToolAK4_deep.fillEfficiencies(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep = self.btagToolAK4_deep.getWeight(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep_up = self.btagToolAK4_deep_up.getWeight(event,idx_jet,fatjet_idx_H)
self.BTagAK4Weight_deep_down = self.btagToolAK4_deep_down.getWeight(event,idx_jet,fatjet_idx_H)
#search for AK4 jets which match with the subjets from the H
ak4_subjets = []
subjet1 = ROOT.TLorentzVector()
subjet2 = ROOT.TLorentzVector()
subjet1_idx = event.FatJet_subJetIdx1[fatjet_idx_H]
subjet2_idx = event.FatJet_subJetIdx2[fatjet_idx_H]
if subjet1_idx >= 0 and subjet2_idx >= 0:
subjet1.SetPtEtaPhiM(event.SubJet_pt[subjet1_idx],event.SubJet_eta[subjet1_idx],event.SubJet_phi[subjet1_idx],event.SubJet_mass[subjet1_idx])
subjet2.SetPtEtaPhiM(event.SubJet_pt[subjet2_idx],event.SubJet_eta[subjet2_idx],event.SubJet_phi[subjet2_idx],event.SubJet_mass[subjet2_idx])
for jetid in range(event.nJet):
ak4jet = ROOT.TLorentzVector()
ak4jet.SetPtEtaPhiM(event.Jet_pt[jetid],event.Jet_eta[jetid],event.Jet_phi[jetid],event.Jet_mass[jetid])
if ak4jet.DeltaR(subjet1)<0.4:
ak4_subjets.append(jetid)
if ak4jet.DeltaR(subjet2)<0.4:
ak4_subjets.append(jetid)
self.btagToolAK8_deep.fillEfficiencies(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep = self.btagToolAK8_deep.getWeight(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep_up = self.btagToolAK8_deep_up.getWeight(event,ak4_subjets,fatjet_idx_H)
self.BTagAK8Weight_deep_down = self.btagToolAK8_deep_down.getWeight(event,ak4_subjets,fatjet_idx_H)
########### X and variables ############
X = V + H
if self.isZtoNN:
X_chs = V_chs + H
self.X_mass_chs = X_chs.M()
if self.runJEC:
X_nom = V + H_nom
X_jesUp = V + H_jesUp
X_jesDown = V + H_jesDown
X_jerUp = V + H_jerUp
X_jerDown = V + H_jerDown
X_MET_nom = MET_nom + H_nom
X_MET_jesUp = MET_jesUp + H_jesUp
X_MET_jesDown = MET_jesDown + H_jesDown
X_MET_jerUp = MET_jerUp + H_jerUp
X_MET_jerDown = MET_jerDown + H_jerDown
self.X_mass_nom = X_nom.M()
self.X_mass_jesUp = X_jesUp.M()
self.X_mass_jesDown = X_jesDown.M()
self.X_mass_jerUp = X_jerUp.M()
self.X_mass_jerDown = X_jerDown.M()
self.X_mass_MET_nom = X_MET_nom.M()
self.X_mass_MET_jesUp = X_MET_jesUp.M()
self.X_mass_MET_jesDown = X_MET_jesDown.M()
self.X_mass_MET_jerUp = X_MET_jerUp.M()
self.X_mass_MET_jerDown = X_MET_jerDown.M()
self.V_pt = V.Pt()
self.V_eta = V.Eta()
self.V_phi = V.Phi()
self.V_mass = V.M()
if self.isZtoNN:
self.V_mass = 0.
self.H_pt = H.Pt()
self.H_eta = H.Eta()
self.H_phi = H.Phi()
self.H_M = H.M()
self.H_mass = event.FatJet_msoftdrop[fatjet_idx_H]
self.X_pt = X.Pt()
self.X_eta = X.Eta()
self.X_phi = X.Phi()
self.X_mass = X.M()
self.H_dbt = event.FatJet_btagHbb[fatjet_idx_H]
self.BtagDeepB = event.FatJet_btagDeepB[fatjet_idx_H]
self.DeepTagMD_H4qvsQCD = event.FatJet_deepTagMD_H4qvsQCD[fatjet_idx_H]
self.DeepTagMD_HbbvsQCD = event.FatJet_deepTagMD_HbbvsQCD[fatjet_idx_H]
self.DeepTagMD_ZHbbvsQCD = event.FatJet_deepTagMD_ZHbbvsQCD[fatjet_idx_H]
self.DeepTagMD_ZbbvsQCD = event.FatJet_deepTagMD_ZbbvsQCD[fatjet_idx_H]
self.DeepTagMD_bbvsLight = event.FatJet_deepTagMD_bbvsLight[fatjet_idx_H]
self.DeepTagMD_WvsQCD = event.FatJet_deepTagMD_WvsQCD[fatjet_idx_H]
self.DeepTagMD_ZvsQCD = event.FatJet_deepTagMD_ZvsQCD[fatjet_idx_H]
self.H_tau21 = fatjet_tau21_list[fatjet_idx_H]
self.H_tau41 = fatjet_tau41_list[fatjet_idx_H]
self.H_tau42 = fatjet_tau42_list[fatjet_idx_H]
self.H_tau31 = fatjet_tau31_list[fatjet_idx_H]
self.H_tau32 = fatjet_tau32_list[fatjet_idx_H]
self.VHDEta = abs(V.Eta() - H.Eta())
if event.FatJet_subJetIdx1[fatjet_idx_H] >= 0:
Hcsv1 = event.SubJet_btagCSVV2[event.FatJet_subJetIdx1[fatjet_idx_H]]
Hdeepcsv1 = event.SubJet_btagDeepB[event.FatJet_subJetIdx1[fatjet_idx_H]]
else:
Hcsv1 = -1.
Hdeepcsv1 = -1.
if event.FatJet_subJetIdx2[fatjet_idx_H] >= 0:
Hcsv2 = event.SubJet_btagCSVV2[event.FatJet_subJetIdx2[fatjet_idx_H]]
Hdeepcsv2 = event.SubJet_btagDeepB[event.FatJet_subJetIdx2[fatjet_idx_H]]
else:
Hcsv2 = -1.
Hdeepcsv2 = -1.
self.H_csv1 = max(Hcsv1,Hcsv2)
self.H_csv2 = min(Hcsv1,Hcsv2)
self.H_deepcsv1 = max(Hdeepcsv1,Hdeepcsv2)
self.H_deepcsv2 = min(Hdeepcsv1,Hdeepcsv2)
if self.year == 2016:
wp_loose = 0.2217
wp_medium = 0.6321
wp_tight = 0.8953
elif self.year == 2017:
wp_loose = 0.1522
wp_medium = 0.4941
wp_tight = 0.8001
elif self.year == 2018:
wp_loose = 0.1241
wp_medium = 0.4184
wp_tight = 0.7527
if self.H_deepcsv2 > wp_loose:
self.isHtobb = True
if self.H_deepcsv1 > wp_medium and self.H_deepcsv2 > wp_loose:
self.isHtobb_ml = True
if self.MaxJetNoFatJetBTag > wp_loose:
self.isMaxBTag_loose = True
if self.MaxJetNoFatJetBTag > wp_medium:
self.isMaxBTag_medium = True
if self.MaxJetNoFatJetBTag > wp_tight:
self.isMaxBTag_tight = True
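# mass-decorrelated (DDT) N-subjettiness: tau21' = tau21 + M*log(m^2/pT) with slope M = 0.082, reducing the jet-mass dependence of the substructure cut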
if self.H_mass != 0.:
self.H_ddt = self.H_tau21 + 0.082 * np.log(self.H_mass*self.H_mass/self.H_pt)
else:
self.H_ddt = -1.
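# transverse mass of the V+H system: mT = sqrt(2*pT(V)*pT(H)*(1 - cos(dPhi(V,H)))); used as X_mass in the Z->nunu channel, where the longitudinal MET component is unknown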
self.X_tmass = np.sqrt(2.*V.Pt()*fatjet_tlv_list[fatjet_idx_H].Pt()*(1.-np.cos(fatjet_tlv_list[fatjet_idx_H].DeltaPhi(V))))
if self.isZtoNN:
self.X_mass = self.X_tmass
else:
self.X_mass = X.M()
if self.X_mass > 750 and self.VH_deltaR > 2:
if self.MinJetMetDPhi>0.5 and self.DPhi>2:
for i,weight in enumerate(nncutflow_list):
self.out.nncutflow_inc.Fill(i,weight)
if self.VHDEta<1.3:
for i,weight in enumerate(eecutflow_list):
self.out.eecutflow_inc.Fill(i,weight)
for i,weight in enumerate(mmcutflow_list):
self.out.mmcutflow_inc.Fill(i,weight)
if self.isZtoEE or self.isZtoMM or self.isZtoNN or self.isTtoEM:
self.fillBranches(event)
return True
#
# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/Nortel-MsCarrier-MscPassport-AtmEbrMIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:19:41 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, SingleValueConstraint, ValueSizeConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "SingleValueConstraint", "ValueSizeConstraint", "ConstraintsUnion", "ConstraintsIntersection")
mscAtmIfIndex, mscAtmIfVptIndex, mscAtmIfVcc, mscAtmIfVptVccIndex, mscAtmIfVpc, mscAtmIfVptVcc, mscAtmIfVccIndex, mscAtmIfVpcIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex", "mscAtmIfVptIndex", "mscAtmIfVcc", "mscAtmIfVptVccIndex", "mscAtmIfVpc", "mscAtmIfVptVcc", "mscAtmIfVccIndex", "mscAtmIfVpcIndex")
mscAtmIfIisp, mscAtmIfVptIisp, mscAtmIfVptIispIndex, mscAtmIfIispIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIisp", "mscAtmIfVptIisp", "mscAtmIfVptIispIndex", "mscAtmIfIispIndex")
mscAtmIfVpcSrc, mscAtmIfVptVccSrcIndex, mscAtmIfVccSrcIndex, mscAtmIfVptVccSrc, mscAtmIfVpcSrcIndex, mscAtmIfVccSrc = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrc", "mscAtmIfVptVccSrcIndex", "mscAtmIfVccSrcIndex", "mscAtmIfVptVccSrc", "mscAtmIfVpcSrcIndex", "mscAtmIfVccSrc")
mscAtmIfVptPnniIndex, mscAtmIfPnniIndex, mscAtmIfPnni, mscAtmIfVptPnni = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex", "mscAtmIfPnniIndex", "mscAtmIfPnni", "mscAtmIfVptPnni")
mscAtmIfVptUni, mscAtmIfUni, mscAtmIfUniIndex, mscAtmIfVptUniIndex = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUni", "mscAtmIfUni", "mscAtmIfUniIndex", "mscAtmIfVptUniIndex")
Counter32, DisplayString, Gauge32, StorageType, RowStatus = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-StandardTextualConventionsMIB", "Counter32", "DisplayString", "Gauge32", "StorageType", "RowStatus")
NonReplicated, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-TextualConventionsMIB", "NonReplicated")
mscPassportMIBs, = mibBuilder.importSymbols("Nortel-MsCarrier-MscPassport-UsefulDefinitionsMIB", "mscPassportMIBs")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, ObjectIdentity, ModuleIdentity, Bits, Counter32, IpAddress, Gauge32, NotificationType, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, MibIdentifier, Unsigned32, Counter64, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "ObjectIdentity", "ModuleIdentity", "Bits", "Counter32", "IpAddress", "Gauge32", "NotificationType", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "MibIdentifier", "Unsigned32", "Counter64", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
atmEbrMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159))
mscAtmIfVpcSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2))
mscAtmIfVpcSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVpcSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 6, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVpcSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
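# EbrInfo under AtmIf/Vpc: operational EBR state per VPC (subscription flags,
# whether the connection was recovered) and recovery/optimization counters.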
mscAtmIfVpcEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11))
mscAtmIfVpcEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVpcEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVpcEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVpcEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoIndex.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVpcEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40), )
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVpcEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVpcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVpcEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVpcEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 4, 11, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVpcEbrInfoTotalPathOptimizations.setStatus('mandatory')
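# EbrOv under AtmIf/Vcc/Src: the same override pattern as above, applied to
# VCC source endpoints.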
mscAtmIfVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2))
mscAtmIfVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
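# EbrInfo under AtmIf/Vcc: operational EBR state and counters per VCC.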
mscAtmIfVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12))
mscAtmIfVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoIndex.setStatus('mandatory')
mscAtmIfVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 5, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
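# Ebr under AtmIf/Uni: interface-level EBR provisioning (connectionRecovery
# and pathOptimization one-octet bit maps, default 0xc0), connection gauges
# and recovery/optimization totals.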
mscAtmIfUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7))
mscAtmIfUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrRowStatus.setStatus('mandatory')
mscAtmIfUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrComponentName.setStatus('mandatory')
mscAtmIfUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrStorageType.setStatus('mandatory')
mscAtmIfUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfUniEbrIndex.setStatus('mandatory')
mscAtmIfUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfUniEbrProvTable.setStatus('mandatory')
mscAtmIfUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrProvEntry.setStatus('mandatory')
mscAtmIfUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfUniEbrOperTable.setStatus('mandatory')
mscAtmIfUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrOperEntry.setStatus('mandatory')
mscAtmIfUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsTable.setStatus('mandatory')
mscAtmIfUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfUniEbrTotalPathOptimizations.setStatus('mandatory')
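# Ebr under AtmIf/Iisp: same interface-level EBR pattern for IISP links.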
mscAtmIfIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7))
mscAtmIfIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrRowStatus.setStatus('mandatory')
mscAtmIfIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrComponentName.setStatus('mandatory')
mscAtmIfIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrStorageType.setStatus('mandatory')
mscAtmIfIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfIispEbrIndex.setStatus('mandatory')
mscAtmIfIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfIispEbrProvTable.setStatus('mandatory')
mscAtmIfIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrProvEntry.setStatus('mandatory')
mscAtmIfIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfIispEbrOperTable.setStatus('mandatory')
mscAtmIfIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrOperEntry.setStatus('mandatory')
mscAtmIfIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsTable.setStatus('mandatory')
mscAtmIfIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfIispEbrTotalPathOptimizations.setStatus('mandatory')
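# Ebr under AtmIf/Vpt/Iisp: same pattern scoped to IISP running over a
# virtual path terminator.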
mscAtmIfVptIispEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7))
mscAtmIfVptIispEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptIispEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrRowStatus.setStatus('mandatory')
mscAtmIfVptIispEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrComponentName.setStatus('mandatory')
mscAtmIfVptIispEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStorageType.setStatus('mandatory')
mscAtmIfVptIispEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIndex.setStatus('mandatory')
mscAtmIfVptIispEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvTable.setStatus('mandatory')
mscAtmIfVptIispEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrProvEntry.setStatus('mandatory')
mscAtmIfVptIispEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptIispEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptIispEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperTable.setStatus('mandatory')
mscAtmIfVptIispEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrOperEntry.setStatus('mandatory')
mscAtmIfVptIispEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptIispEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptIispEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsTable.setStatus('mandatory')
mscAtmIfVptIispEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmIispMIB", "mscAtmIfVptIispIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptIispEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptIispEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptIispEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptIispEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 6, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptIispEbrTotalPathOptimizations.setStatus('mandatory')
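# Ebr under AtmIf/Vpt/Pnni: same pattern for PNNI over a virtual path
# terminator.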
mscAtmIfVptPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7))
mscAtmIfVptPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrComponentName.setStatus('mandatory')
mscAtmIfVptPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStorageType.setStatus('mandatory')
mscAtmIfVptPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIndex.setStatus('mandatory')
mscAtmIfVptPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvTable.setStatus('mandatory')
mscAtmIfVptPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperTable.setStatus('mandatory')
mscAtmIfVptPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfVptPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 7, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptPnniEbrTotalPathOptimizations.setStatus('mandatory')
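# Ebr under AtmIf/Vpt/Uni: same pattern for UNI over a virtual path
# terminator.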
mscAtmIfVptUniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7))
mscAtmIfVptUniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfVptUniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrRowStatus.setStatus('mandatory')
mscAtmIfVptUniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrComponentName.setStatus('mandatory')
mscAtmIfVptUniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStorageType.setStatus('mandatory')
mscAtmIfVptUniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIndex.setStatus('mandatory')
mscAtmIfVptUniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvTable.setStatus('mandatory')
mscAtmIfVptUniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrProvEntry.setStatus('mandatory')
mscAtmIfVptUniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfVptUniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrPathOptimization.setStatus('mandatory')
mscAtmIfVptUniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperTable.setStatus('mandatory')
mscAtmIfVptUniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrOperEntry.setStatus('mandatory')
mscAtmIfVptUniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfVptUniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfVptUniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsTable.setStatus('mandatory')
mscAtmIfVptUniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmUniMIB", "mscAtmIfVptUniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptUniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfVptUniEbrStatsEntry.setStatus('mandatory')
mscAtmIfVptUniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptUniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 8, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptUniEbrTotalPathOptimizations.setStatus('mandatory')
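# EbrOv under AtmIf/Vpt/Vcc/Src: per-source EBR overrides for VCCs within a
# virtual path terminator.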
mscAtmIfVptVccSrcEbrOv = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2))
mscAtmIfVptVccSrcEbrOvRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRowStatus.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvComponentName.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvStorageType.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvIndex.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20), )
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvTable.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmNetworkingMIB", "mscAtmIfVptVccSrcIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccSrcEbrOvIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvProvEntry.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccSrcEbrOvOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 8, 2, 20, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('yes')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfVptVccSrcEbrOvOptimizationSubscribed.setStatus('mandatory')
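# EbrInfo under AtmIf/Vpt/Vcc: operational EBR state and counters for VCCs
# within a virtual path terminator.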
mscAtmIfVptVccEbrInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12))
mscAtmIfVptVccEbrInfoRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatusEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 1), RowStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRowStatus.setStatus('mandatory')
mscAtmIfVptVccEbrInfoComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoComponentName.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStorageType.setStatus('mandatory')
mscAtmIfVptVccEbrInfoIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoIndex.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOperEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoRecoverySubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoRecoverySubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoOptimizationSubscribed = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoOptimizationSubscribed.setStatus('mandatory')
mscAtmIfVptVccEbrInfoConnectionRecovered = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 30, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoConnectionRecovered.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40), )
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsTable.setStatus('mandatory')
mscAtmIfVptVccEbrInfoStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfVptVccIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfVptVccEbrInfoIndex"))
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoStatsEntry.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfVptVccEbrInfoTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 9, 20, 12, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfVptVccEbrInfoTotalPathOptimizations.setStatus('mandatory')
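# Ebr under AtmIf/Pnni: same interface-level EBR pattern for PNNI links.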
mscAtmIfPnniEbr = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7))
mscAtmIfPnniEbrRowStatusTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusTable.setStatus('mandatory')
mscAtmIfPnniEbrRowStatusEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatusEntry.setStatus('mandatory')
mscAtmIfPnniEbrRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 1), RowStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrRowStatus.setStatus('mandatory')
mscAtmIfPnniEbrComponentName = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrComponentName.setStatus('mandatory')
mscAtmIfPnniEbrStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 4), StorageType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrStorageType.setStatus('mandatory')
mscAtmIfPnniEbrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 1, 1, 10), NonReplicated())
if mibBuilder.loadTexts: mscAtmIfPnniEbrIndex.setStatus('mandatory')
mscAtmIfPnniEbrProvTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvTable.setStatus('mandatory')
mscAtmIfPnniEbrProvEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrProvEntry.setStatus('mandatory')
mscAtmIfPnniEbrConnectionRecovery = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrConnectionRecovery.setStatus('mandatory')
mscAtmIfPnniEbrPathOptimization = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 20, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1).clone(hexValue="c0")).setMaxAccess("readwrite")
if mibBuilder.loadTexts: mscAtmIfPnniEbrPathOptimization.setStatus('mandatory')
mscAtmIfPnniEbrOperTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperTable.setStatus('mandatory')
mscAtmIfPnniEbrOperEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrOperEntry.setStatus('mandatory')
mscAtmIfPnniEbrSubscribedConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 1), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrSubscribedConnections.setStatus('mandatory')
mscAtmIfPnniEbrEligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 2), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrEligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrIneligibleRecoveredConnections = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 30, 1, 3), Gauge32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrIneligibleRecoveredConnections.setStatus('mandatory')
mscAtmIfPnniEbrStatsTable = MibTable((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40), )
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsTable.setStatus('mandatory')
mscAtmIfPnniEbrStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1), ).setIndexNames((0, "Nortel-MsCarrier-MscPassport-AtmCoreMIB", "mscAtmIfIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmPnniMIB", "mscAtmIfPnniIndex"), (0, "Nortel-MsCarrier-MscPassport-AtmEbrMIB", "mscAtmIfPnniEbrIndex"))
if mibBuilder.loadTexts: mscAtmIfPnniEbrStatsEntry.setStatus('mandatory')
mscAtmIfPnniEbrTotalConnectionRecoveries = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalConnectionRecoveries.setStatus('mandatory')
mscAtmIfPnniEbrTotalPathOptimizations = MibTableColumn((1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 96, 7, 40, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mscAtmIfPnniEbrTotalPathOptimizations.setStatus('mandatory')
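# Conformance group and capabilities OID registrations for this MIB.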
atmEbrGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1))
atmEbrGroupCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1))
atmEbrGroupCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3))
atmEbrGroupCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 1, 1, 3, 2))
atmEbrCapabilities = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3))
atmEbrCapabilitiesCA = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1))
atmEbrCapabilitiesCA02 = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3))
atmEbrCapabilitiesCA02A = MibIdentifier((1, 3, 6, 1, 4, 1, 562, 36, 2, 2, 159, 3, 1, 3, 2))
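# A minimal usage sketch (hypothetical, kept commented out so this module
# stays side-effect free on import). Assuming the compiled module is on the
# pysnmp MIB search path, its objects can be resolved by name and their OIDs
# read back:
#
#   from pysnmp.smi import builder
#   mb = builder.MibBuilder()
#   mb.loadModules('Nortel-MsCarrier-MscPassport-AtmEbrMIB')
#   col, = mb.importSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',
#                           'mscAtmIfUniEbrTotalConnectionRecoveries')
#   col.getName()  # -> (1, 3, 6, 1, 4, 1, 562, 36, 2, 1, 114, 6, 7, 40, 1, 1)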
mibBuilder.exportSymbols("Nortel-MsCarrier-MscPassport-AtmEbrMIB", mscAtmIfVptPnniEbr=mscAtmIfVptPnniEbr, atmEbrGroupCA=atmEbrGroupCA, mscAtmIfUniEbrTotalConnectionRecoveries=mscAtmIfUniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrComponentName=mscAtmIfPnniEbrComponentName, mscAtmIfVptPnniEbrProvEntry=mscAtmIfVptPnniEbrProvEntry, mscAtmIfVptVccEbrInfoTotalPathOptimizations=mscAtmIfVptVccEbrInfoTotalPathOptimizations, mscAtmIfIispEbrOperTable=mscAtmIfIispEbrOperTable, mscAtmIfPnniEbrStatsTable=mscAtmIfPnniEbrStatsTable, atmEbrGroup=atmEbrGroup, mscAtmIfUniEbrConnectionRecovery=mscAtmIfUniEbrConnectionRecovery, mscAtmIfVptIispEbrOperEntry=mscAtmIfVptIispEbrOperEntry, mscAtmIfVptUniEbrTotalPathOptimizations=mscAtmIfVptUniEbrTotalPathOptimizations, mscAtmIfVptVccSrcEbrOvIndex=mscAtmIfVptVccSrcEbrOvIndex, mscAtmIfUniEbr=mscAtmIfUniEbr, mscAtmIfVptUniEbrPathOptimization=mscAtmIfVptUniEbrPathOptimization, mscAtmIfUniEbrStatsEntry=mscAtmIfUniEbrStatsEntry, mscAtmIfVpcEbrInfoStorageType=mscAtmIfVpcEbrInfoStorageType, mscAtmIfVptIispEbrRowStatus=mscAtmIfVptIispEbrRowStatus, mscAtmIfPnniEbrProvTable=mscAtmIfPnniEbrProvTable, mscAtmIfVptPnniEbrSubscribedConnections=mscAtmIfVptPnniEbrSubscribedConnections, mscAtmIfVccEbrInfoTotalPathOptimizations=mscAtmIfVccEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbrStatsTable=mscAtmIfVptIispEbrStatsTable, mscAtmIfVptUniEbrProvEntry=mscAtmIfVptUniEbrProvEntry, mscAtmIfVptPnniEbrEligibleRecoveredConnections=mscAtmIfVptPnniEbrEligibleRecoveredConnections, mscAtmIfVccEbrInfoComponentName=mscAtmIfVccEbrInfoComponentName, mscAtmIfVccSrcEbrOvRowStatusEntry=mscAtmIfVccSrcEbrOvRowStatusEntry, mscAtmIfPnniEbrIndex=mscAtmIfPnniEbrIndex, mscAtmIfVpcSrcEbrOvStorageType=mscAtmIfVpcSrcEbrOvStorageType, mscAtmIfIispEbrRowStatusTable=mscAtmIfIispEbrRowStatusTable, mscAtmIfVptPnniEbrPathOptimization=mscAtmIfVptPnniEbrPathOptimization, mscAtmIfIispEbrProvEntry=mscAtmIfIispEbrProvEntry, mscAtmIfVccEbrInfoRowStatusEntry=mscAtmIfVccEbrInfoRowStatusEntry, mscAtmIfVptIispEbrStorageType=mscAtmIfVptIispEbrStorageType, mscAtmIfVptPnniEbrStatsEntry=mscAtmIfVptPnniEbrStatsEntry, mscAtmIfVptVccEbrInfoIndex=mscAtmIfVptVccEbrInfoIndex, mscAtmIfPnniEbrTotalConnectionRecoveries=mscAtmIfPnniEbrTotalConnectionRecoveries, mscAtmIfVptVccEbrInfoOperTable=mscAtmIfVptVccEbrInfoOperTable, mscAtmIfPnniEbrEligibleRecoveredConnections=mscAtmIfPnniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoRecoverySubscribed=mscAtmIfVpcEbrInfoRecoverySubscribed, mscAtmIfVptVccSrcEbrOvProvTable=mscAtmIfVptVccSrcEbrOvProvTable, mscAtmIfVptVccEbrInfoConnectionRecovered=mscAtmIfVptVccEbrInfoConnectionRecovered, mscAtmIfVptIispEbrComponentName=mscAtmIfVptIispEbrComponentName, mscAtmIfVptUniEbrComponentName=mscAtmIfVptUniEbrComponentName, mscAtmIfVptVccEbrInfoRowStatusEntry=mscAtmIfVptVccEbrInfoRowStatusEntry, mscAtmIfIispEbrComponentName=mscAtmIfIispEbrComponentName, mscAtmIfPnniEbrOperEntry=mscAtmIfPnniEbrOperEntry, mscAtmIfVptIispEbrTotalPathOptimizations=mscAtmIfVptIispEbrTotalPathOptimizations, mscAtmIfVccEbrInfo=mscAtmIfVccEbrInfo, mscAtmIfVptUniEbrIndex=mscAtmIfVptUniEbrIndex, mscAtmIfVptUniEbrIneligibleRecoveredConnections=mscAtmIfVptUniEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02=atmEbrCapabilitiesCA02, mscAtmIfVptUniEbrRowStatusTable=mscAtmIfVptUniEbrRowStatusTable, mscAtmIfVptVccEbrInfoRowStatusTable=mscAtmIfVptVccEbrInfoRowStatusTable, mscAtmIfVptIispEbrProvTable=mscAtmIfVptIispEbrProvTable, mscAtmIfVpcSrcEbrOvOptimizationSubscribed=mscAtmIfVpcSrcEbrOvOptimizationSubscribed, 
mscAtmIfIispEbrTotalPathOptimizations=mscAtmIfIispEbrTotalPathOptimizations, mscAtmIfVccSrcEbrOvComponentName=mscAtmIfVccSrcEbrOvComponentName, mscAtmIfVccSrcEbrOvOptimizationSubscribed=mscAtmIfVccSrcEbrOvOptimizationSubscribed, mscAtmIfUniEbrOperTable=mscAtmIfUniEbrOperTable, mscAtmIfIispEbrStorageType=mscAtmIfIispEbrStorageType, mscAtmIfVptVccSrcEbrOv=mscAtmIfVptVccSrcEbrOv, mscAtmIfIispEbrStatsTable=mscAtmIfIispEbrStatsTable, mscAtmIfUniEbrSubscribedConnections=mscAtmIfUniEbrSubscribedConnections, mscAtmIfUniEbrRowStatusTable=mscAtmIfUniEbrRowStatusTable, mscAtmIfIispEbrStatsEntry=mscAtmIfIispEbrStatsEntry, mscAtmIfVptVccEbrInfoOperEntry=mscAtmIfVptVccEbrInfoOperEntry, mscAtmIfIispEbrRowStatusEntry=mscAtmIfIispEbrRowStatusEntry, mscAtmIfVptIispEbrIneligibleRecoveredConnections=mscAtmIfVptIispEbrIneligibleRecoveredConnections, atmEbrCapabilitiesCA02A=atmEbrCapabilitiesCA02A, mscAtmIfVptVccEbrInfoOptimizationSubscribed=mscAtmIfVptVccEbrInfoOptimizationSubscribed, mscAtmIfVccEbrInfoIndex=mscAtmIfVccEbrInfoIndex, mscAtmIfIispEbrPathOptimization=mscAtmIfIispEbrPathOptimization, mscAtmIfPnniEbrRowStatusEntry=mscAtmIfPnniEbrRowStatusEntry, mscAtmIfVptIispEbrSubscribedConnections=mscAtmIfVptIispEbrSubscribedConnections, mscAtmIfUniEbrStatsTable=mscAtmIfUniEbrStatsTable, mscAtmIfVptUniEbrStatsTable=mscAtmIfVptUniEbrStatsTable, mscAtmIfVptPnniEbrRowStatus=mscAtmIfVptPnniEbrRowStatus, mscAtmIfVptUniEbrProvTable=mscAtmIfVptUniEbrProvTable, mscAtmIfVptUniEbrOperEntry=
mscAtmIfVptUniEbrOperEntry, mscAtmIfVccEbrInfoRecoverySubscribed=mscAtmIfVccEbrInfoRecoverySubscribed, mscAtmIfVpcEbrInfo=mscAtmIfVpcEbrInfo, mscAtmIfPnniEbrIneligibleRecoveredConnections=mscAtmIfPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcSrcEbrOvRowStatusTable=mscAtmIfVpcSrcEbrOvRowStatusTable, mscAtmIfVptPnniEbrIneligibleRecoveredConnections=mscAtmIfVptPnniEbrIneligibleRecoveredConnections, mscAtmIfVpcEbrInfoConnectionRecovered=mscAtmIfVpcEbrInfoConnectionRecovered, mscAtmIfVccSrcEbrOvProvTable=mscAtmIfVccSrcEbrOvProvTable, mscAtmIfVccEbrInfoRowStatusTable=mscAtmIfVccEbrInfoRowStatusTable, mscAtmIfVccEbrInfoStorageType=mscAtmIfVccEbrInfoStorageType, mscAtmIfVpcEbrInfoTotalPathOptimizations=mscAtmIfVpcEbrInfoTotalPathOptimizations, mscAtmIfVptIispEbr=mscAtmIfVptIispEbr, mscAtmIfVpcEbrInfoRowStatus=mscAtmIfVpcEbrInfoRowStatus, mscAtmIfVccSrcEbrOvRowStatusTable=mscAtmIfVccSrcEbrOvRowStatusTable, mscAtmIfIispEbrConnectionRecovery=mscAtmIfIispEbrConnectionRecovery, mscAtmIfVccSrcEbrOvProvEntry=mscAtmIfVccSrcEbrOvProvEntry, mscAtmIfUniEbrIndex=mscAtmIfUniEbrIndex, mscAtmIfVptUniEbrTotalConnectionRecoveries=mscAtmIfVptUniEbrTotalConnectionRecoveries, mscAtmIfVpcEbrInfoTotalConnectionRecoveries=mscAtmIfVpcEbrInfoTotalConnectionRecoveries, mscAtmIfVptVccSrcEbrOvRowStatusEntry=mscAtmIfVptVccSrcEbrOvRowStatusEntry, mscAtmIfIispEbrTotalConnectionRecoveries=mscAtmIfIispEbrTotalConnectionRecoveries, mscAtmIfIispEbrRowStatus=mscAtmIfIispEbrRowStatus, mscAtmIfVpcSrcEbrOvProvTable=mscAtmIfVpcSrcEbrOvProvTable, mscAtmIfVptUniEbrRowStatus=mscAtmIfVptUniEbrRowStatus, mscAtmIfPnniEbrRowStatusTable=mscAtmIfPnniEbrRowStatusTable, mscAtmIfPnniEbrStatsEntry=mscAtmIfPnniEbrStatsEntry, mscAtmIfVpcSrcEbrOvIndex=mscAtmIfVpcSrcEbrOvIndex, mscAtmIfVpcEbrInfoComponentName=mscAtmIfVpcEbrInfoComponentName, mscAtmIfVptIispEbrPathOptimization=mscAtmIfVptIispEbrPathOptimization, mscAtmIfVpcSrcEbrOvRowStatus=mscAtmIfVpcSrcEbrOvRowStatus, mscAtmIfVpcEbrInfoRowStatusEntry=mscAtmIfVpcEbrInfoRowStatusEntry, mscAtmIfVptPnniEbrOperEntry=mscAtmIfVptPnniEbrOperEntry, mscAtmIfIispEbrSubscribedConnections=mscAtmIfIispEbrSubscribedConnections, mscAtmIfVccSrcEbrOv=mscAtmIfVccSrcEbrOv, mscAtmIfVptIispEbrEligibleRecoveredConnections=mscAtmIfVptIispEbrEligibleRecoveredConnections, mscAtmIfUniEbrProvEntry=mscAtmIfUniEbrProvEntry, mscAtmIfVpcEbrInfoRowStatusTable=mscAtmIfVpcEbrInfoRowStatusTable, mscAtmIfVptPnniEbrComponentName=mscAtmIfVptPnniEbrComponentName, mscAtmIfVptPnniEbrConnectionRecovery=mscAtmIfVptPnniEbrConnectionRecovery, mscAtmIfVptVccSrcEbrOvRowStatus=mscAtmIfVptVccSrcEbrOvRowStatus, mscAtmIfVptIispEbrRowStatusTable=mscAtmIfVptIispEbrRowStatusTable, mscAtmIfVptPnniEbrStorageType=mscAtmIfVptPnniEbrStorageType, mscAtmIfVptVccEbrInfoStorageType=mscAtmIfVptVccEbrInfoStorageType, mscAtmIfIispEbr=mscAtmIfIispEbr, mscAtmIfVccEbrInfoOperEntry=mscAtmIfVccEbrInfoOperEntry, mscAtmIfVptPnniEbrTotalConnectionRecoveries=mscAtmIfVptPnniEbrTotalConnectionRecoveries, mscAtmIfPnniEbrRowStatus=mscAtmIfPnniEbrRowStatus, mscAtmIfVpcSrcEbrOvProvEntry=mscAtmIfVpcSrcEbrOvProvEntry, mscAtmIfVccEbrInfoRowStatus=mscAtmIfVccEbrInfoRowStatus, mscAtmIfVptIispEbrIndex=mscAtmIfVptIispEbrIndex, mscAtmIfVpcEbrInfoOperEntry=mscAtmIfVpcEbrInfoOperEntry, mscAtmIfVptIispEbrOperTable=mscAtmIfVptIispEbrOperTable, mscAtmIfUniEbrProvTable=mscAtmIfUniEbrProvTable, mscAtmIfPnniEbrPathOptimization=mscAtmIfPnniEbrPathOptimization, mscAtmIfVpcEbrInfoStatsTable=mscAtmIfVpcEbrInfoStatsTable, mscAtmIfVccSrcEbrOvIndex=mscAtmIfVccSrcEbrOvIndex, 
mscAtmIfPnniEbrSubscribedConnections=mscAtmIfPnniEbrSubscribedConnections, mscAtmIfVptIispEbrRowStatusEntry=mscAtmIfVptIispEbrRowStatusEntry, mscAtmIfIispEbrProvTable=mscAtmIfIispEbrProvTable, mscAtmIfVptVccSrcEbrOvComponentName=mscAtmIfVptVccSrcEbrOvComponentName, mscAtmIfVptUniEbrConnectionRecovery=mscAtmIfVptUniEbrConnectionRecovery, mscAtmIfVccSrcEbrOvStorageType=mscAtmIfVccSrcEbrOvStorageType, mscAtmIfVpcSrcEbrOv=mscAtmIfVpcSrcEbrOv, mscAtmIfVptPnniEbrRowStatusTable=mscAtmIfVptPnniEbrRowStatusTable, mscAtmIfUniEbrEligibleRecoveredConnections=mscAtmIfUniEbrEligibleRecoveredConnections, mscAtmIfVptUniEbrRowStatusEntry=mscAtmIfVptUniEbrRowStatusEntry, mscAtmIfVccSrcEbrOvRowStatus=mscAtmIfVccSrcEbrOvRowStatus, mscAtmIfIispEbrEligibleRecoveredConnections=mscAtmIfIispEbrEligibleRecoveredConnections, mscAtmIfPnniEbrOperTable=mscAtmIfPnniEbrOperTable, mscAtmIfVpcEbrInfoOperTable=mscAtmIfVpcEbrInfoOperTable, mscAtmIfVpcEbrInfoStatsEntry=mscAtmIfVpcEbrInfoStatsEntry, mscAtmIfVptUniEbrStorageType=mscAtmIfVptUniEbrStorageType, mscAtmIfVccEbrInfoStatsTable=mscAtmIfVccEbrInfoStatsTable, mscAtmIfVptVccEbrInfoStatsTable=mscAtmIfVptVccEbrInfoStatsTable, mscAtmIfUniEbrPathOptimization=mscAtmIfUniEbrPathOptimization, mscAtmIfVptPnniEbrStatsTable=mscAtmIfVptPnniEbrStatsTable, mscAtmIfVptUniEbrSubscribedConnections=mscAtmIfVptUniEbrSubscribedConnections, mscAtmIfVptVccEbrInfo=mscAtmIfVptVccEbrInfo, mscAtmIfPnniEbrConnectionRecovery=mscAtmIfPnniEbrConnectionRecovery, mscAtmIfVccEbrInfoConnectionRecovered=mscAtmIfVccEbrInfoConnectionRecovered, mscAtmIfVccEbrInfoStatsEntry=mscAtmIfVccEbrInfoStatsEntry, mscAtmIfVptVccEbrInfoTotalConnectionRecoveries=mscAtmIfVptVccEbrInfoTotalConnectionRecoveries, mscAtmIfUniEbrStorageType=mscAtmIfUniEbrStorageType, mscAtmIfVptUniEbrStatsEntry=mscAtmIfVptUniEbrStatsEntry, mscAtmIfVptPnniEbrProvTable=mscAtmIfVptPnniEbrProvTable, mscAtmIfVccSrcEbrOvRecoverySubscribed=mscAtmIfVccSrcEbrOvRecoverySubscribed, atmEbrCapabilities=atmEbrCapabilities, mscAtmIfUniEbrComponentName=mscAtmIfUniEbrComponentName, mscAtmIfPnniEbrTotalPathOptimizations=mscAtmIfPnniEbrTotalPathOptimizations, mscAtmIfUniEbrIneligibleRecoveredConnections=mscAtmIfUniEbrIneligibleRecoveredConnections, mscAtmIfPnniEbr=mscAtmIfPnniEbr, mscAtmIfVptIispEbrProvEntry=mscAtmIfVptIispEbrProvEntry, mscAtmIfUniEbrRowStatusEntry=mscAtmIfUniEbrRowStatusEntry, mscAtmIfVptPnniEbrRowStatusEntry=mscAtmIfVptPnniEbrRowStatusEntry, mscAtmIfVpcEbrInfoIndex=mscAtmIfVpcEbrInfoIndex, mscAtmIfVptVccSrcEbrOvProvEntry=mscAtmIfVptVccSrcEbrOvProvEntry, mscAtmIfVccEbrInfoOperTable=mscAtmIfVccEbrInfoOperTable, mscAtmIfVptVccEbrInfoStatsEntry=mscAtmIfVptVccEbrInfoStatsEntry, atmEbrGroupCA02A=atmEbrGroupCA02A, mscAtmIfVccEbrInfoOptimizationSubscribed=mscAtmIfVccEbrInfoOptimizationSubscribed, mscAtmIfVptVccSrcEbrOvRowStatusTable=mscAtmIfVptVccSrcEbrOvRowStatusTable, atmEbrMIB=atmEbrMIB, mscAtmIfVptVccEbrInfoRecoverySubscribed=mscAtmIfVptVccEbrInfoRecoverySubscribed, mscAtmIfVpcSrcEbrOvRowStatusEntry=mscAtmIfVpcSrcEbrOvRowStatusEntry, mscAtmIfVptVccEbrInfoRowStatus=mscAtmIfVptVccEbrInfoRowStatus, mscAtmIfVptIispEbrStatsEntry=mscAtmIfVptIispEbrStatsEntry, mscAtmIfPnniEbrStorageType=mscAtmIfPnniEbrStorageType, mscAtmIfPnniEbrProvEntry=mscAtmIfPnniEbrProvEntry, mscAtmIfVptUniEbrOperTable=mscAtmIfVptUniEbrOperTable, mscAtmIfIispEbrIneligibleRecoveredConnections=mscAtmIfIispEbrIneligibleRecoveredConnections, mscAtmIfVptIispEbrConnectionRecovery=mscAtmIfVptIispEbrConnectionRecovery, mscAtmIfVptUniEbr=mscAtmIfVptUniEbr, atmEbrGroupCA02=atmEbrGroupCA02, 
mscAtmIfVptIispEbrTotalConnectionRecoveries=mscAtmIfVptIispEbrTotalConnectionRecoveries, mscAtmIfUniEbrTotalPathOptimizations=mscAtmIfUniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvRecoverySubscribed=mscAtmIfVpcSrcEbrOvRecoverySubscribed, mscAtmIfVptPnniEbrOperTable=mscAtmIfVptPnniEbrOperTable, mscAtmIfVptVccSrcEbrOvOptimizationSubscribed=mscAtmIfVptVccSrcEbrOvOptimizationSubscribed, mscAtmIfVptUniEbrEligibleRecoveredConnections=mscAtmIfVptUniEbrEligibleRecoveredConnections, mscAtmIfVpcEbrInfoOptimizationSubscribed=mscAtmIfVpcEbrInfoOptimizationSubscribed, mscAtmIfVptPnniEbrIndex=mscAtmIfVptPnniEbrIndex, mscAtmIfUniEbrRowStatus=mscAtmIfUniEbrRowStatus, mscAtmIfUniEbrOperEntry=mscAtmIfUniEbrOperEntry, mscAtmIfVptVccSrcEbrOvStorageType=mscAtmIfVptVccSrcEbrOvStorageType, mscAtmIfVptPnniEbrTotalPathOptimizations=mscAtmIfVptPnniEbrTotalPathOptimizations, mscAtmIfVpcSrcEbrOvComponentName=mscAtmIfVpcSrcEbrOvComponentName, mscAtmIfVptVccEbrInfoComponentName=mscAtmIfVptVccEbrInfoComponentName, mscAtmIfIispEbrOperEntry=mscAtmIfIispEbrOperEntry, mscAtmIfVptVccSrcEbrOvRecoverySubscribed=mscAtmIfVptVccSrcEbrOvRecoverySubscribed, mscAtmIfIispEbrIndex=mscAtmIfIispEbrIndex, atmEbrCapabilitiesCA=atmEbrCapabilitiesCA, mscAtmIfVccEbrInfoTotalConnectionRecoveries=mscAtmIfVccEbrInfoTotalConnectionRecoveries)
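# Usage sketch (not part of the generated module): once this compiled MIB sits
# on the pysnmp MIB search path, its exported symbols can be loaded and
# resolved by name. The source directory below is an assumption for
# illustration only.
#
#   from pysnmp.smi import builder
#
#   mib = builder.MibBuilder()
#   mib.addMibSources(builder.DirMibSource('.'))  # directory holding this module
#   mib.loadModules('Nortel-MsCarrier-MscPassport-AtmEbrMIB')
#   (atmEbrMib,) = mib.importSymbols('Nortel-MsCarrier-MscPassport-AtmEbrMIB',
#                                    'atmEbrMIB')
#   print(atmEbrMib.getName())  # OID under which this MIB module is registered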
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg. Do not edit."""
import sys
python3 = sys.hexversion > 0x03000000  # True when running under Python 3
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import sensor_msgs.msg
class GetPlanningSceneRequest(genpy.Message):
_md5sum = "67ad55e9bed9c8f21dfb4b9b1ca8df7d"
_type = "arm_navigation_msgs/GetPlanningSceneRequest"
_has_header = False #flag to mark the presence of a Header object
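# _md5sum is a hash of the full message definition below; ROS compares these
# hashes during the connection handshake so publisher and subscriber agree on
# the message layout before any data is exchanged.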
_full_text = """
PlanningScene planning_scene_diff
arm_navigation_msgs/OrderedCollisionOperations operations
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See its documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by three vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE, all attached bodies attached to any object will be removed
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space (with 32 bits of precision).
# It is recommended to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
================================================================================
MSG: arm_navigation_msgs/OrderedCollisionOperations
# A set of collision operations that will be performed in the order they are specified
CollisionOperation[] collision_operations
================================================================================
MSG: arm_navigation_msgs/CollisionOperation
# A definition of a collision operation
# E.g. ("gripper",COLLISION_SET_ALL,ENABLE) will enable collisions
# between the gripper and all objects in the collision space
string object1
string object2
string COLLISION_SET_ALL="all"
string COLLISION_SET_OBJECTS="objects"
string COLLISION_SET_ATTACHED_OBJECTS="attached"
# The penetration distance to which collisions are allowed. This is 0.0 by default.
float64 penetration_distance
# Flag that determines whether collisions will be enabled or disabled for the pair of objects specified above
int32 operation
int32 DISABLE=0
int32 ENABLE=1
"""
__slots__ = ['planning_scene_diff','operations']
_slot_types = ['arm_navigation_msgs/PlanningScene','arm_navigation_msgs/OrderedCollisionOperations']
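# Wire-format note (informational): ROS serialization is little-endian; each
# string is prefixed with a uint32 byte length and each variable-length array
# with a uint32 element count, which is exactly the pattern the generated
# serialize()/deserialize() bodies below follow.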
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments, as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene_diff,operations
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
else:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
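# Illustrative construction with keyword arguments (the recommended style per
# the docstring above); the field values here are assumptions, not generated code:
#
#   req = GetPlanningSceneRequest(
#       planning_scene_diff=arm_navigation_msgs.msg.PlanningScene(),
#       operations=arm_navigation_msgs.msg.OrderedCollisionOperations())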
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.position))
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.velocity))
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene_diff.robot_state.joint_state.effort))
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v1 = val1.position
_x = _v1
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v2 = val1.orientation
_x = _v2
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v3 = val1.header
buff.write(_struct_I.pack(_v3.seq))
_v4 = _v3.stamp
_x = _v4
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v3.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v5 = val1.transform
_v6 = _v5.translation
_x = _v6
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v7 = _v5.rotation
_x = _v7
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.pack(pattern, *val1.enabled))
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v8 = val1.shape
buff.write(_struct_b.pack(_v8.type))
length = len(_v8.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *_v8.dimensions))
length = len(_v8.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *_v8.triangles))
length = len(_v8.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v8.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v9 = val1.pose_stamped
_v10 = _v9.header
buff.write(_struct_I.pack(_v10.seq))
_v11 = _v10.stamp
_x = _v11
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v10.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v12 = _v9.pose
_v13 = _v12.position
_x = _v13
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v14 = _v12.orientation
_x = _v14
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v15 = val1.header
buff.write(_struct_I.pack(_v15.seq))
_v16 = _v15.stamp
_x = _v16
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v15.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v17 = val1.operation
buff.write(_struct_b.pack(_v17.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val2.dimensions))
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val2.triangles))
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v18 = val2.position
_x = _v18
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v19 = val2.orientation
_x = _v19
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v20 = val1.object
_v21 = _v20.header
buff.write(_struct_I.pack(_v21.seq))
_v22 = _v21.stamp
_x = _v22
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v21.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v20.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v20.padding))
_v23 = _v20.operation
buff.write(_struct_b.pack(_v23.operation))
length = len(_v20.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v20.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val3.dimensions))
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val3.triangles))
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v20.poses)
buff.write(_struct_I.pack(length))
for val3 in _v20.poses:
_v24 = val3.position
_x = _v24
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v25 = val3.orientation
_x = _v25
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v26 = val1.center
_x = _v26
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v27 = val1.extents
_x = _v27
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v28 = val1.axis
_x = _v28
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
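# Serialization sketch (illustrative): serialize() expects a file-like buffer,
# e.g. io.BytesIO on Python 3 or StringIO.StringIO on Python 2.
#
#   from io import BytesIO
#   buf = BytesIO()
#   req.serialize(buf)
#   wire_bytes = buf.getvalue()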
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v29 = val1.position
_x = _v29
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v30 = val1.orientation
_x = _v30
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v31 = val1.header
start = end
end += 4
(_v31.seq,) = _struct_I.unpack(str[start:end])
_v32 = _v31.stamp
_x = _v32
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v31.frame_id = str[start:end].decode('utf-8')
else:
_v31.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v33 = val1.transform
_v34 = _v33.translation
_x = _v34
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v35 = _v33.rotation
_x = _v35
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
val1.enabled = list(map(bool, val1.enabled))  # list() keeps a sequence under Python 3, where map() returns an iterator
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v36 = val1.shape
start = end
end += 1
(_v36.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v36.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v36.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v36.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v36.vertices.append(val3)
_v37 = val1.pose_stamped
_v38 = _v37.header
start = end
end += 4
(_v38.seq,) = _struct_I.unpack(str[start:end])
_v39 = _v38.stamp
_x = _v39
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v38.frame_id = str[start:end].decode('utf-8')
else:
_v38.frame_id = str[start:end]
_v40 = _v37.pose
_v41 = _v40.position
_x = _v41
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v42 = _v40.orientation
_x = _v42
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v43 = val1.header
start = end
end += 4
(_v43.seq,) = _struct_I.unpack(str[start:end])
_v44 = _v43.stamp
_x = _v44
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v43.frame_id = str[start:end].decode('utf-8')
else:
_v43.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v45 = val1.operation
start = end
end += 1
(_v45.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v46 = val2.position
_x = _v46
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v47 = val2.orientation
_x = _v47
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v48 = val1.object
_v49 = _v48.header
start = end
end += 4
(_v49.seq,) = _struct_I.unpack(str[start:end])
_v50 = _v49.stamp
_x = _v50
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v49.frame_id = str[start:end].decode('utf-8')
else:
_v49.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v48.id = str[start:end].decode('utf-8')
else:
_v48.id = str[start:end]
start = end
end += 4
(_v48.padding,) = _struct_f.unpack(str[start:end])
_v51 = _v48.operation
start = end
end += 1
(_v51.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v48.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v48.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v52 = val3.position
_x = _v52
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v53 = val3.orientation
_x = _v53
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v48.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v54 = val1.center
_x = _v54
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v55 = val1.extents
_x = _v55
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v56 = val1.axis
_x = _v56
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
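# deserialize() returns self, so decoding can be chained (illustrative,
# continuing the buffer example above):
#
#   req2 = GetPlanningSceneRequest().deserialize(wire_bytes)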
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene_diff.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.position.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene_diff.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene_diff.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.robot_state.multi_dof_joint_state.poses:
_v57 = val1.position
_x = _v57
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v58 = val1.orientation
_x = _v58
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.fixed_frame_transforms:
_v59 = val1.header
buff.write(_struct_I.pack(_v59.seq))
_v60 = _v59.stamp
_x = _v60
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v59.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v61 = val1.transform
_v62 = _v61.translation
_x = _v62
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v63 = _v61.rotation
_x = _v63
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene_diff.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene_diff.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v64 = val1.shape
buff.write(_struct_b.pack(_v64.type))
length = len(_v64.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v64.dimensions.tostring())
length = len(_v64.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v64.triangles.tostring())
length = len(_v64.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v64.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v65 = val1.pose_stamped
_v66 = _v65.header
buff.write(_struct_I.pack(_v66.seq))
_v67 = _v66.stamp
_x = _v67
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v66.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v68 = _v65.pose
_v69 = _v68.position
_x = _v69
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v70 = _v68.orientation
_x = _v70
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene_diff.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene_diff.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_objects:
_v71 = val1.header
buff.write(_struct_I.pack(_v71.seq))
_v72 = _v71.stamp
_x = _v72
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v71.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v73 = val1.operation
buff.write(_struct_b.pack(_v73.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v74 = val2.position
_x = _v74
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v75 = val2.orientation
_x = _v75
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene_diff.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v76 = val1.object
_v77 = _v76.header
buff.write(_struct_I.pack(_v77.seq))
_v78 = _v77.stamp
_x = _v78
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v77.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v76.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v76.padding))
_v79 = _v76.operation
buff.write(_struct_b.pack(_v79.operation))
length = len(_v76.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v76.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v76.poses)
buff.write(_struct_I.pack(length))
for val3 in _v76.poses:
_v80 = val3.position
_x = _v80
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v81 = val3.orientation
_x = _v81
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs))
_x = self.planning_scene_diff.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene_diff.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene_diff.collision_map.boxes:
_v82 = val1.center
_x = _v82
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v83 = val1.extents
_x = _v83
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v84 = val1.axis
_x = _v84
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
length = len(self.operations.collision_operations)
buff.write(_struct_I.pack(length))
for val1 in self.operations.collision_operations:
_x = val1.object1
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.object2
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1
buff.write(_struct_di.pack(_x.penetration_distance, _x.operation))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
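# Editor's note (hedged): the numpy-backed serializer above writes arrays with
# ndarray.tostring(), a deprecated alias of the byte-identical ndarray.tobytes()
# (the alias was removed in NumPy 2.0), so newer NumPy needs the latter.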
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene_diff is None:
self.planning_scene_diff = arm_navigation_msgs.msg.PlanningScene()
if self.operations is None:
self.operations = arm_navigation_msgs.msg.OrderedCollisionOperations()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene_diff.robot_state.joint_state.header.seq, _x.planning_scene_diff.robot_state.joint_state.header.stamp.secs, _x.planning_scene_diff.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene_diff.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene_diff.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v85 = val1.position
_x = _v85
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v86 = val1.orientation
_x = _v86
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v87 = val1.header
start = end
end += 4
(_v87.seq,) = _struct_I.unpack(str[start:end])
_v88 = _v87.stamp
_x = _v88
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v87.frame_id = str[start:end].decode('utf-8')
else:
_v87.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v89 = val1.transform
_v90 = _v89.translation
_x = _v90
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v91 = _v89.rotation
_x = _v91
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene_diff.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene_diff.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
val1.enabled = list(map(bool, val1.enabled))  # list() so the result stays sized under Python 3
self.planning_scene_diff.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v92 = val1.shape
start = end
end += 1
(_v92.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v92.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v92.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v92.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v92.vertices.append(val3)
_v93 = val1.pose_stamped
_v94 = _v93.header
start = end
end += 4
(_v94.seq,) = _struct_I.unpack(str[start:end])
_v95 = _v94.stamp
_x = _v95
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v94.frame_id = str[start:end].decode('utf-8')
else:
_v94.frame_id = str[start:end]
_v96 = _v93.pose
_v97 = _v96.position
_x = _v97
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v98 = _v96.orientation
_x = _v98
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene_diff.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v99 = val1.header
start = end
end += 4
(_v99.seq,) = _struct_I.unpack(str[start:end])
_v100 = _v99.stamp
_x = _v100
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v99.frame_id = str[start:end].decode('utf-8')
else:
_v99.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v101 = val1.operation
start = end
end += 1
(_v101.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v102 = val2.position
_x = _v102
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v103 = val2.orientation
_x = _v103
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene_diff.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v104 = val1.object
_v105 = _v104.header
start = end
end += 4
(_v105.seq,) = _struct_I.unpack(str[start:end])
_v106 = _v105.stamp
_x = _v106
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v105.frame_id = str[start:end].decode('utf-8')
else:
_v105.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v104.id = str[start:end].decode('utf-8')
else:
_v104.id = str[start:end]
start = end
end += 4
(_v104.padding,) = _struct_f.unpack(str[start:end])
_v107 = _v104.operation
start = end
end += 1
(_v107.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v104.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v104.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v108 = val3.position
_x = _v108
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v109 = val3.orientation
_x = _v109
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v104.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene_diff.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene_diff.collision_map.header.seq, _x.planning_scene_diff.collision_map.header.stamp.secs, _x.planning_scene_diff.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene_diff.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v110 = val1.center
_x = _v110
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v111 = val1.extents
_x = _v111
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v112 = val1.axis
_x = _v112
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene_diff.collision_map.boxes.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.operations.collision_operations = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionOperation()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object1 = str[start:end].decode('utf-8')
else:
val1.object1 = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.object2 = str[start:end].decode('utf-8')
else:
val1.object2 = str[start:end]
_x = val1
start = end
end += 12
(_x.penetration_distance, _x.operation,) = _struct_di.unpack(str[start:end])
self.operations.collision_operations.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_di = struct.Struct("<di")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
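# Editor's sketch (not generated code): the _struct_* objects above are
# precompiled struct.Struct instances shared by every (de)serializer in this
# module. A minimal round trip with hypothetical values:
#   data = _struct_3d.pack(1.0, 2.0, 3.0)      # 24 bytes, little-endian float64
#   x, y, z = _struct_3d.unpack(data)
# Variable-length strings instead use an ad-hoc length-prefixed format:
#   struct.pack('<I%ss' % 9, 9, b'base_link')  # uint32 length + raw bytes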
"""autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneResponse.msg. Do not edit."""
import sys
python3 = sys.hexversion > 0x03000000
import genpy
import struct
import arm_navigation_msgs.msg
import geometry_msgs.msg
import std_msgs.msg
import sensor_msgs.msg
class GetPlanningSceneResponse(genpy.Message):
_md5sum = "285525c9abe002fbafa99af84a14b4cb"
_type = "arm_navigation_msgs/GetPlanningSceneResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """
PlanningScene planning_scene
================================================================================
MSG: arm_navigation_msgs/PlanningScene
#full robot state
arm_navigation_msgs/RobotState robot_state
#additional frames for duplicating tf
geometry_msgs/TransformStamped[] fixed_frame_transforms
#full allowed collision matrix
AllowedCollisionMatrix allowed_collision_matrix
#allowed contacts
arm_navigation_msgs/AllowedContactSpecification[] allowed_contacts
#all link paddings
arm_navigation_msgs/LinkPadding[] link_padding
#collision objects
arm_navigation_msgs/CollisionObject[] collision_objects
arm_navigation_msgs/AttachedCollisionObject[] attached_collision_objects
#the collision map
arm_navigation_msgs/CollisionMap collision_map
================================================================================
MSG: arm_navigation_msgs/RobotState
# This message contains information about the robot state, i.e. the positions of its joints and links
sensor_msgs/JointState joint_state
arm_navigation_msgs/MultiDOFJointState multi_dof_joint_state
================================================================================
MSG: sensor_msgs/JointState
# This is a message that holds data to describe the state of a set of torque controlled joints.
#
# The state of each joint (revolute or prismatic) is defined by:
# * the position of the joint (rad or m),
# * the velocity of the joint (rad/s or m/s) and
# * the effort that is applied in the joint (Nm or N).
#
# Each joint is uniquely identified by its name
# The header specifies the time at which the joint states were recorded. All the joint states
# in one message have to be recorded at the same time.
#
# This message consists of multiple arrays, one for each part of the joint state.
# The goal is to make each of the fields optional. When e.g. your joints have no
# effort associated with them, you can leave the effort array empty.
#
# All arrays in this message should have the same size, or be empty.
# This is the only way to uniquely associate the joint name with the correct
# states.
Header header
string[] name
float64[] position
float64[] velocity
float64[] effort
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: arm_navigation_msgs/MultiDOFJointState
#A representation of a multi-dof joint state
time stamp
string[] joint_names
string[] frame_ids
string[] child_frame_ids
geometry_msgs/Pose[] poses
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
================================================================================
MSG: geometry_msgs/TransformStamped
# This expresses a transform from coordinate frame header.frame_id
# to the coordinate frame child_frame_id
#
# This message is mostly used by the
# <a href="http://www.ros.org/wiki/tf">tf</a> package.
# See its documentation for more information.
Header header
string child_frame_id # the frame id of the child frame
Transform transform
================================================================================
MSG: geometry_msgs/Transform
# This represents the transform between two coordinate frames in free space.
Vector3 translation
Quaternion rotation
================================================================================
MSG: geometry_msgs/Vector3
# This represents a vector in free space.
float64 x
float64 y
float64 z
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionMatrix
# the list of link names in the matrix
string[] link_names
# the individual entries in the allowed collision matrix
# symmetric, with same order as link_names
AllowedCollisionEntry[] entries
================================================================================
MSG: arm_navigation_msgs/AllowedCollisionEntry
# whether or not collision checking is enabled
bool[] enabled
================================================================================
MSG: arm_navigation_msgs/AllowedContactSpecification
# The names of the regions
string name
# The shape of the region in the environment
arm_navigation_msgs/Shape shape
# The pose of the space defining the region
geometry_msgs/PoseStamped pose_stamped
# The set of links that will be allowed to have penetration contact within this region
string[] link_names
# The maximum penetration depth allowed for every link
float64 penetration_depth
================================================================================
MSG: arm_navigation_msgs/Shape
byte SPHERE=0
byte BOX=1
byte CYLINDER=2
byte MESH=3
byte type
#### define sphere, box, cylinder ####
# the origin of each shape is considered at the shape's center
# for sphere
# radius := dimensions[0]
# for cylinder
# radius := dimensions[0]
# length := dimensions[1]
# the length is along the Z axis
# for box
# size_x := dimensions[0]
# size_y := dimensions[1]
# size_z := dimensions[2]
float64[] dimensions
#### define mesh ####
# list of triangles; triangle k is defined by three vertices located
# at indices triangles[3k], triangles[3k+1], triangles[3k+2]
int32[] triangles
geometry_msgs/Point[] vertices
================================================================================
MSG: geometry_msgs/PoseStamped
# A Pose with reference coordinate frame and timestamp
Header header
Pose pose
================================================================================
MSG: arm_navigation_msgs/LinkPadding
#name for the link
string link_name
# padding to apply to the link
float64 padding
================================================================================
MSG: arm_navigation_msgs/CollisionObject
# a header, used for interpreting the poses
Header header
# the id of the object
string id
# The padding used for filtering points near the object.
# This does not affect collision checking for the object.
# Set to negative to get zero padding.
float32 padding
#This contains what is to be done with the object
CollisionObjectOperation operation
#the shapes associated with the object
arm_navigation_msgs/Shape[] shapes
#the poses associated with the shapes - will be transformed using the header
geometry_msgs/Pose[] poses
================================================================================
MSG: arm_navigation_msgs/CollisionObjectOperation
#Puts the object into the environment
#or updates the object if already added
byte ADD=0
#Removes the object from the environment entirely
byte REMOVE=1
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes an attached object, detaches from the attached link
#But adds back in as regular object
byte DETACH_AND_ADD_AS_OBJECT=2
#Only valid within the context of a CollisionAttachedObject message
#Will be ignored if sent with a CollisionObject message
#Takes current object in the environment and removes it as
#a regular object
byte ATTACH_AND_REMOVE_AS_OBJECT=3
# Byte code for operation
byte operation
================================================================================
MSG: arm_navigation_msgs/AttachedCollisionObject
# The CollisionObject will be attached with a fixed joint to this link
# If link_name is set to REMOVE_ALL_ATTACHED_OBJECTS and object.operation
# is set to REMOVE, this will remove all attached bodies attached to any object
string link_name
#Reserved for indicating that all attached objects should be removed
string REMOVE_ALL_ATTACHED_OBJECTS = "all"
#This contains the actual shapes and poses for the CollisionObject
#to be attached to the link
#If action is remove and no object.id is set, all objects
#attached to the link indicated by link_name will be removed
CollisionObject object
# The set of links that the attached objects are allowed to touch
# by default - the link_name is included by default
string[] touch_links
================================================================================
MSG: arm_navigation_msgs/CollisionMap
#header for interpreting box positions
Header header
#boxes for use in collision testing
OrientedBoundingBox[] boxes
================================================================================
MSG: arm_navigation_msgs/OrientedBoundingBox
#the center of the box
geometry_msgs/Point32 center
#the extents of the box, assuming the center is at the point
geometry_msgs/Point32 extents
#the axis of the box
geometry_msgs/Point32 axis
#the angle of rotation around the axis
float32 angle
================================================================================
MSG: geometry_msgs/Point32
# This contains the position of a point in free space (with 32 bits of precision).
# It is recommended to use Point wherever possible instead of Point32.
#
# This recommendation is to promote interoperability.
#
# This message is designed to take up less space when sending
# lots of points at once, as in the case of a PointCloud.
float32 x
float32 y
float32 z
"""
__slots__ = ['planning_scene']
_slot_types = ['arm_navigation_msgs/PlanningScene']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
planning_scene
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GetPlanningSceneResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
else:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
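# Usage sketch (editor's example, not generated code; `scene` is hypothetical):
#   resp = GetPlanningSceneResponse()                      # default-initialized scene
#   resp = GetPlanningSceneResponse(planning_scene=scene)  # preferred keyword form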
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.position))
length = len(self.planning_scene.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.velocity))
length = len(self.planning_scene.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *self.planning_scene.robot_state.joint_state.effort))
_x = self
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
_v113 = val1.position
_x = _v113
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v114 = val1.orientation
_x = _v114
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.fixed_frame_transforms:
_v115 = val1.header
buff.write(_struct_I.pack(_v115.seq))
_v116 = _v115.stamp
_x = _v116
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v115.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v117 = val1.transform
_v118 = _v117.translation
_x = _v118
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v119 = _v117.rotation
_x = _v119
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(struct.pack(pattern, *val1.enabled))
length = len(self.planning_scene.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v120 = val1.shape
buff.write(_struct_b.pack(_v120.type))
length = len(_v120.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *_v120.dimensions))
length = len(_v120.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *_v120.triangles))
length = len(_v120.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v120.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v121 = val1.pose_stamped
_v122 = _v121.header
buff.write(_struct_I.pack(_v122.seq))
_v123 = _v122.stamp
_x = _v123
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v122.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v124 = _v121.pose
_v125 = _v124.position
_x = _v125
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v126 = _v124.orientation
_x = _v126
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_objects:
_v127 = val1.header
buff.write(_struct_I.pack(_v127.seq))
_v128 = _v127.stamp
_x = _v128
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v127.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v129 = val1.operation
buff.write(_struct_b.pack(_v129.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val2.dimensions))
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val2.triangles))
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v130 = val2.position
_x = _v130
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v131 = val2.orientation
_x = _v131
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v132 = val1.object
_v133 = _v132.header
buff.write(_struct_I.pack(_v133.seq))
_v134 = _v133.stamp
_x = _v134
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v133.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v132.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v132.padding))
_v135 = _v132.operation
buff.write(_struct_b.pack(_v135.operation))
length = len(_v132.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v132.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.pack(pattern, *val3.dimensions))
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.pack(pattern, *val3.triangles))
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v132.poses)
buff.write(_struct_I.pack(length))
for val3 in _v132.poses:
_v136 = val3.position
_x = _v136
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v137 = val3.orientation
_x = _v137
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
_x = self.planning_scene.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_map.boxes:
_v138 = val1.center
_x = _v138
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v139 = val1.extents
_x = _v139
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v140 = val1.axis
_x = _v140
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
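# Both handlers defer to genpy's _check_types, which inspects the offending
# field and re-raises the error as a more descriptive SerializationError.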
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.position = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = struct.unpack(pattern, str[start:end])
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v141 = val1.position
_x = _v141
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v142 = val1.orientation
_x = _v142
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v143 = val1.header
start = end
end += 4
(_v143.seq,) = _struct_I.unpack(str[start:end])
_v144 = _v143.stamp
_x = _v144
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v143.frame_id = str[start:end].decode('utf-8')
else:
_v143.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v145 = val1.transform
_v146 = _v145.translation
_x = _v146
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v147 = _v145.rotation
_x = _v147
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
val1.enabled = struct.unpack(pattern, str[start:end])
val1.enabled = list(map(bool, val1.enabled))  # list() so the result stays sized under Python 3
self.planning_scene.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v148 = val1.shape
start = end
end += 1
(_v148.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v148.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v148.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v148.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v148.vertices.append(val3)
_v149 = val1.pose_stamped
_v150 = _v149.header
start = end
end += 4
(_v150.seq,) = _struct_I.unpack(str[start:end])
_v151 = _v150.stamp
_x = _v151
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v150.frame_id = str[start:end].decode('utf-8')
else:
_v150.frame_id = str[start:end]
_v152 = _v149.pose
_v153 = _v152.position
_x = _v153
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v154 = _v152.orientation
_x = _v154
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v155 = val1.header
start = end
end += 4
(_v155.seq,) = _struct_I.unpack(str[start:end])
_v156 = _v155.stamp
_x = _v156
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v155.frame_id = str[start:end].decode('utf-8')
else:
_v155.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v157 = val1.operation
start = end
end += 1
(_v157.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v158 = val2.position
_x = _v158
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v159 = val2.orientation
_x = _v159
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v160 = val1.object
_v161 = _v160.header
start = end
end += 4
(_v161.seq,) = _struct_I.unpack(str[start:end])
_v162 = _v161.stamp
_x = _v162
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v161.frame_id = str[start:end].decode('utf-8')
else:
_v161.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v160.id = str[start:end].decode('utf-8')
else:
_v160.id = str[start:end]
start = end
end += 4
(_v160.padding,) = _struct_f.unpack(str[start:end])
_v163 = _v160.operation
start = end
end += 1
(_v163.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = struct.unpack(pattern, str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v160.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v160.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v164 = val3.position
_x = _v164
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v165 = val3.orientation
_x = _v165
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v160.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v166 = val1.center
_x = _v166
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v167 = val1.extents
_x = _v167
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v168 = val1.axis
_x = _v168
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
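# Round-trip sketch (editor's example; BytesIO stands in for the rospy buffer):
#   from io import BytesIO
#   buff = BytesIO()
#   GetPlanningSceneResponse().serialize(buff)
#   msg = GetPlanningSceneResponse().deserialize(buff.getvalue())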
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs))
_x = self.planning_scene.robot_state.joint_state.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.robot_state.joint_state.name)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.joint_state.name:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.joint_state.position)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.position.tostring())
length = len(self.planning_scene.robot_state.joint_state.velocity)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.velocity.tostring())
length = len(self.planning_scene.robot_state.joint_state.effort)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(self.planning_scene.robot_state.joint_state.effort.tostring())
_x = self
buff.write(_struct_2I.pack(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.joint_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.joint_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.robot_state.multi_dof_joint_state.poses)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.robot_state.multi_dof_joint_state.poses:
_v169 = val1.position
_x = _v169
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v170 = val1.orientation
_x = _v170
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.fixed_frame_transforms)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.fixed_frame_transforms:
_v171 = val1.header
buff.write(_struct_I.pack(_v171.seq))
_v172 = _v171.stamp
_x = _v172
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v171.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.child_frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v173 = val1.transform
_v174 = _v173.translation
_x = _v174
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v175 = _v173.rotation
_x = _v175
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.allowed_collision_matrix.link_names)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.link_names:
length = len(val1)
if python3 or type(val1) == unicode:
val1 = val1.encode('utf-8')
length = len(val1)
buff.write(struct.pack('<I%ss'%length, length, val1))
length = len(self.planning_scene.allowed_collision_matrix.entries)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_collision_matrix.entries:
length = len(val1.enabled)
buff.write(_struct_I.pack(length))
pattern = '<%sB'%length
buff.write(val1.enabled.tostring())
length = len(self.planning_scene.allowed_contacts)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.allowed_contacts:
_x = val1.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v176 = val1.shape
buff.write(_struct_b.pack(_v176.type))
length = len(_v176.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(_v176.dimensions.tostring())
length = len(_v176.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(_v176.triangles.tostring())
length = len(_v176.vertices)
buff.write(_struct_I.pack(length))
for val3 in _v176.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v177 = val1.pose_stamped
_v178 = _v177.header
buff.write(_struct_I.pack(_v178.seq))
_v179 = _v178.stamp
_x = _v179
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v178.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v180 = _v177.pose
_v181 = _v180.position
_x = _v181
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v182 = _v180.orientation
_x = _v182
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.link_names)
buff.write(_struct_I.pack(length))
for val2 in val1.link_names:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
buff.write(_struct_d.pack(val1.penetration_depth))
length = len(self.planning_scene.link_padding)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.link_padding:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_d.pack(val1.padding))
length = len(self.planning_scene.collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_objects:
_v183 = val1.header
buff.write(_struct_I.pack(_v183.seq))
_v184 = _v183.stamp
_x = _v184
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v183.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = val1.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(val1.padding))
_v185 = val1.operation
buff.write(_struct_b.pack(_v185.operation))
length = len(val1.shapes)
buff.write(_struct_I.pack(length))
for val2 in val1.shapes:
buff.write(_struct_b.pack(val2.type))
length = len(val2.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val2.dimensions.tostring())
length = len(val2.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val2.triangles.tostring())
length = len(val2.vertices)
buff.write(_struct_I.pack(length))
for val3 in val2.vertices:
_x = val3
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(val1.poses)
buff.write(_struct_I.pack(length))
for val2 in val1.poses:
_v186 = val2.position
_x = _v186
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v187 = val2.orientation
_x = _v187
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(self.planning_scene.attached_collision_objects)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.attached_collision_objects:
_x = val1.link_name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_v188 = val1.object
_v189 = _v188.header
buff.write(_struct_I.pack(_v189.seq))
_v190 = _v189.stamp
_x = _v190
buff.write(_struct_2I.pack(_x.secs, _x.nsecs))
_x = _v189.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = _v188.id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_struct_f.pack(_v188.padding))
_v191 = _v188.operation
buff.write(_struct_b.pack(_v191.operation))
length = len(_v188.shapes)
buff.write(_struct_I.pack(length))
for val3 in _v188.shapes:
buff.write(_struct_b.pack(val3.type))
length = len(val3.dimensions)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val3.dimensions.tostring())
length = len(val3.triangles)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val3.triangles.tostring())
length = len(val3.vertices)
buff.write(_struct_I.pack(length))
for val4 in val3.vertices:
_x = val4
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
length = len(_v188.poses)
buff.write(_struct_I.pack(length))
for val3 in _v188.poses:
_v192 = val3.position
_x = _v192
buff.write(_struct_3d.pack(_x.x, _x.y, _x.z))
_v193 = val3.orientation
_x = _v193
buff.write(_struct_4d.pack(_x.x, _x.y, _x.z, _x.w))
length = len(val1.touch_links)
buff.write(_struct_I.pack(length))
for val2 in val1.touch_links:
length = len(val2)
if python3 or type(val2) == unicode:
val2 = val2.encode('utf-8')
length = len(val2)
buff.write(struct.pack('<I%ss'%length, length, val2))
_x = self
buff.write(_struct_3I.pack(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs))
_x = self.planning_scene.collision_map.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
length = len(self.planning_scene.collision_map.boxes)
buff.write(_struct_I.pack(length))
for val1 in self.planning_scene.collision_map.boxes:
_v194 = val1.center
_x = _v194
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v195 = val1.extents
_x = _v195
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
_v196 = val1.axis
_x = _v196
buff.write(_struct_3f.pack(_x.x, _x.y, _x.z))
buff.write(_struct_f.pack(val1.angle))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.planning_scene is None:
self.planning_scene = arm_navigation_msgs.msg.PlanningScene()
end = 0
_x = self
start = end
end += 12
(_x.planning_scene.robot_state.joint_state.header.seq, _x.planning_scene.robot_state.joint_state.header.stamp.secs, _x.planning_scene.robot_state.joint_state.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.robot_state.joint_state.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.joint_state.name = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.joint_state.name.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
      self.planning_scene.robot_state.joint_state.position = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.velocity = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
self.planning_scene.robot_state.joint_state.effort = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_x = self
start = end
end += 8
(_x.planning_scene.robot_state.multi_dof_joint_state.stamp.secs, _x.planning_scene.robot_state.multi_dof_joint_state.stamp.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.joint_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.joint_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.robot_state.multi_dof_joint_state.child_frame_ids.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses = []
for i in range(0, length):
val1 = geometry_msgs.msg.Pose()
_v197 = val1.position
_x = _v197
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v198 = val1.orientation
_x = _v198
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.robot_state.multi_dof_joint_state.poses.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms = []
for i in range(0, length):
val1 = geometry_msgs.msg.TransformStamped()
_v199 = val1.header
start = end
end += 4
(_v199.seq,) = _struct_I.unpack(str[start:end])
_v200 = _v199.stamp
_x = _v200
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v199.frame_id = str[start:end].decode('utf-8')
else:
_v199.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.child_frame_id = str[start:end].decode('utf-8')
else:
val1.child_frame_id = str[start:end]
_v201 = val1.transform
_v202 = _v201.translation
_x = _v202
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v203 = _v201.rotation
_x = _v203
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
self.planning_scene.fixed_frame_transforms.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1 = str[start:end].decode('utf-8')
else:
val1 = str[start:end]
self.planning_scene.allowed_collision_matrix.link_names.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_collision_matrix.entries = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedCollisionEntry()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sB'%length
start = end
end += struct.calcsize(pattern)
        val1.enabled = numpy.frombuffer(str[start:end], dtype=numpy.bool_, count=length)
        val1.enabled = list(map(bool, val1.enabled))
self.planning_scene.allowed_collision_matrix.entries.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.allowed_contacts = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AllowedContactSpecification()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.name = str[start:end].decode('utf-8')
else:
val1.name = str[start:end]
_v204 = val1.shape
start = end
end += 1
(_v204.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
_v204.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
_v204.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v204.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v204.vertices.append(val3)
_v205 = val1.pose_stamped
_v206 = _v205.header
start = end
end += 4
(_v206.seq,) = _struct_I.unpack(str[start:end])
_v207 = _v206.stamp
_x = _v207
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v206.frame_id = str[start:end].decode('utf-8')
else:
_v206.frame_id = str[start:end]
_v208 = _v205.pose
_v209 = _v208.position
_x = _v209
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v210 = _v208.orientation
_x = _v210
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.link_names = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.link_names.append(val2)
start = end
end += 8
(val1.penetration_depth,) = _struct_d.unpack(str[start:end])
self.planning_scene.allowed_contacts.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.link_padding = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.LinkPadding()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
start = end
end += 8
(val1.padding,) = _struct_d.unpack(str[start:end])
self.planning_scene.link_padding.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.CollisionObject()
_v211 = val1.header
start = end
end += 4
(_v211.seq,) = _struct_I.unpack(str[start:end])
_v212 = _v211.stamp
_x = _v212
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v211.frame_id = str[start:end].decode('utf-8')
else:
_v211.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.id = str[start:end].decode('utf-8')
else:
val1.id = str[start:end]
start = end
end += 4
(val1.padding,) = _struct_f.unpack(str[start:end])
_v213 = val1.operation
start = end
end += 1
(_v213.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.shapes = []
for i in range(0, length):
val2 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val2.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val2.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val2.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val2.vertices = []
for i in range(0, length):
val3 = geometry_msgs.msg.Point()
_x = val3
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val2.vertices.append(val3)
val1.shapes.append(val2)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.poses = []
for i in range(0, length):
val2 = geometry_msgs.msg.Pose()
_v214 = val2.position
_x = _v214
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v215 = val2.orientation
_x = _v215
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
val1.poses.append(val2)
self.planning_scene.collision_objects.append(val1)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.attached_collision_objects = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.AttachedCollisionObject()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val1.link_name = str[start:end].decode('utf-8')
else:
val1.link_name = str[start:end]
_v216 = val1.object
_v217 = _v216.header
start = end
end += 4
(_v217.seq,) = _struct_I.unpack(str[start:end])
_v218 = _v217.stamp
_x = _v218
start = end
end += 8
(_x.secs, _x.nsecs,) = _struct_2I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v217.frame_id = str[start:end].decode('utf-8')
else:
_v217.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v216.id = str[start:end].decode('utf-8')
else:
_v216.id = str[start:end]
start = end
end += 4
(_v216.padding,) = _struct_f.unpack(str[start:end])
_v219 = _v216.operation
start = end
end += 1
(_v219.operation,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.shapes = []
for i in range(0, length):
val3 = arm_navigation_msgs.msg.Shape()
start = end
end += 1
(val3.type,) = _struct_b.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
end += struct.calcsize(pattern)
val3.dimensions = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
end += struct.calcsize(pattern)
val3.triangles = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val3.vertices = []
for i in range(0, length):
val4 = geometry_msgs.msg.Point()
_x = val4
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
val3.vertices.append(val4)
_v216.shapes.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
_v216.poses = []
for i in range(0, length):
val3 = geometry_msgs.msg.Pose()
_v220 = val3.position
_x = _v220
start = end
end += 24
(_x.x, _x.y, _x.z,) = _struct_3d.unpack(str[start:end])
_v221 = val3.orientation
_x = _v221
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _struct_4d.unpack(str[start:end])
_v216.poses.append(val3)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
val1.touch_links = []
for i in range(0, length):
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
val2 = str[start:end].decode('utf-8')
else:
val2 = str[start:end]
val1.touch_links.append(val2)
self.planning_scene.attached_collision_objects.append(val1)
_x = self
start = end
end += 12
(_x.planning_scene.collision_map.header.seq, _x.planning_scene.collision_map.header.stamp.secs, _x.planning_scene.collision_map.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.planning_scene.collision_map.header.frame_id = str[start:end].decode('utf-8')
else:
self.planning_scene.collision_map.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.planning_scene.collision_map.boxes = []
for i in range(0, length):
val1 = arm_navigation_msgs.msg.OrientedBoundingBox()
_v222 = val1.center
_x = _v222
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v223 = val1.extents
_x = _v223
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
_v224 = val1.axis
_x = _v224
start = end
end += 12
(_x.x, _x.y, _x.z,) = _struct_3f.unpack(str[start:end])
start = end
end += 4
(val1.angle,) = _struct_f.unpack(str[start:end])
self.planning_scene.collision_map.boxes.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
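# Precompiled struct.Struct instances shared by the serializers and
# deserializers above. All formats are little-endian ('<'), matching the
# ROS wire format.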
_struct_I = genpy.struct_I
_struct_b = struct.Struct("<b")
_struct_d = struct.Struct("<d")
_struct_f = struct.Struct("<f")
_struct_3f = struct.Struct("<3f")
_struct_3I = struct.Struct("<3I")
_struct_4d = struct.Struct("<4d")
_struct_2I = struct.Struct("<2I")
_struct_3d = struct.Struct("<3d")
class GetPlanningScene(object):
_type = 'arm_navigation_msgs/GetPlanningScene'
_md5sum = '0a7b07718e4e5c5d35740c730509a151'
_request_class = GetPlanningSceneRequest
_response_class = GetPlanningSceneResponse
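
# Example usage (a minimal sketch, not part of the generated file): calling
# this service from a rospy node. The service name '/get_planning_scene' is
# an assumption; the actual name depends on how the planning environment
# server is launched.
#
#   import rospy
#   rospy.wait_for_service('/get_planning_scene')
#   get_scene = rospy.ServiceProxy('/get_planning_scene', GetPlanningScene)
#   resp = get_scene(GetPlanningSceneRequest())
#   print(len(resp.planning_scene.collision_objects))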
# PySNMP SMI module. Autogenerated from smidump -f python DOCS-IETF-QOS-MIB
# by libsmi2pysnmp-0.1.3 at Thu May 22 11:57:36 2014,
# Python version sys.version_info(major=2, minor=7, micro=2, releaselevel='final', serial=0)
# Imports
( Integer, ObjectIdentifier, OctetString, ) = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
( NamedValues, ) = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
( ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ValueSizeConstraint, ) = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ValueSizeConstraint")
( DscpOrAny, ) = mibBuilder.importSymbols("DIFFSERV-DSCP-TC", "DscpOrAny")
( InterfaceIndex, ifIndex, ) = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex", "ifIndex")
( InetAddress, InetAddressType, InetPortNumber, ) = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddress", "InetAddressType", "InetPortNumber")
( SnmpAdminString, ) = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
( ModuleCompliance, ObjectGroup, ) = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup")
( Bits, Counter32, Counter64, Integer32, ModuleIdentity, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Unsigned32, mib_2, ) = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter32", "Counter64", "Integer32", "ModuleIdentity", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Unsigned32", "mib-2")
( MacAddress, RowStatus, StorageType, TextualConvention, TimeStamp, TruthValue, ) = mibBuilder.importSymbols("SNMPv2-TC", "MacAddress", "RowStatus", "StorageType", "TextualConvention", "TimeStamp", "TruthValue")
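# NOTE: `mibBuilder` is not imported in this module; pysnmp's MibBuilder
# injects it into the module namespace when the file is loaded, so the
# importSymbols calls above only resolve when loaded through MibBuilder.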
# Types
class DocsIetfQosBitRate(TextualConvention, Unsigned32):
displayHint = "d"
class DocsIetfQosRfMacIfDirection(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(2,1,)
namedValues = NamedValues(("downstream", 1), ("upstream", 2), )
class DocsIetfQosSchedulingType(Integer):
subtypeSpec = Integer.subtypeSpec+SingleValueConstraint(3,1,5,6,2,4,)
namedValues = NamedValues(("undefined", 1), ("bestEffort", 2), ("nonRealTimePollingService", 3), ("realTimePollingService", 4), ("unsolictedGrantServiceWithAD", 5), ("unsolictedGrantService", 6), )
# Objects
docsIetfQosMIB = ModuleIdentity((1, 3, 6, 1, 2, 1, 127)).setRevisions(("2006-01-23 00:00",))
if mibBuilder.loadTexts: docsIetfQosMIB.setOrganization("IETF IP over Cable Data Network (IPCDN)\nWorking Group")
if mibBuilder.loadTexts: docsIetfQosMIB.setContactInfo("\nCo-Author: Michael Patrick\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7563\nE-mail: [email protected]\n\nCo-Author: William Murwin\nPostal: Motorola BCS\n 111 Locke Drive\n Marlborough, MA 01752-7214\n U.S.A.\nPhone: +1 508 786 7594\nE-mail: [email protected]\n\nIETF IPCDN Working Group\nGeneral Discussion: [email protected]\nSubscribe: http://www.ietf.org/mailman/listinfo/ipcdn\nArchive: ftp://ftp.ietf.org/ietf-mail-archive/ipcdn\nCo-chairs: Richard Woundy, [email protected]\n Jean-Francois Mule, [email protected]")
if mibBuilder.loadTexts: docsIetfQosMIB.setDescription("This is the management information for\nQuality Of Service (QOS) for DOCSIS 1.1 and 2.0.\n\n\n\nCopyright (C) The Internet Society (2006). This version of\nthis MIB module is part of RFC 4323; see the RFC itself for\nfull legal notices.")
docsIetfQosNotifications = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 0))
docsIetfQosMIBObjects = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 1))
docsIetfQosPktClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 1))
if mibBuilder.loadTexts: docsIetfQosPktClassTable.setDescription("This table describes the packet classification\nconfigured on the CM or CMTS.\nThe model is that a packet either received\nas input from an interface or transmitted\nfor output on an interface may be compared\nagainst an ordered list of rules pertaining to\nthe packet contents. Each rule is a row of this\ntable. A matching rule provides a Service Flow\nID to which the packet is classified.\nAll rules need to match for a packet to match\na classifier.\n\nThe objects in this row correspond to a set of\nClassifier Encoding parameters in a DOCSIS\nMAC management message. The\ndocsIetfQosPktClassBitMap indicates which\nparticular parameters were present in the\nclassifier as signaled in the DOCSIS message.\nIf the referenced parameter was not present\nin the signaled DOCSIS 1.1 and 2.0 Classifier, the\ncorresponding object in this row reports a\nvalue as specified in the DESCRIPTION section.")
docsIetfQosPktClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 1, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPktClassEntry.setDescription("An entry in this table provides a single packet\nclassifier rule. The index ifIndex is an ifType\nof docsCableMaclayer(127).")
docsIetfQosPktClassId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosPktClassId.setDescription("Index assigned to packet classifier entry by\nthe CMTS, which is unique per Service Flow.")
docsIetfQosPktClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 2), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDirection.setDescription("Indicates the direction to which the classifier\nis applied.")
docsIetfQosPktClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPriority.setDescription("The value specifies the order of evaluation\nof the classifiers.\n\nThe higher the value, the higher the priority.\nThe value of 0 is used as default in\nprovisioned Service Flows Classifiers.\nThe default value of 64 is used for dynamic\nService Flow Classifiers.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the default\nvalue as defined above.")
docsIetfQosPktClassIpTosLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosLow.setDescription("The low value of a range of TOS byte values.\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet, as originally defined in RFC 791,\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as per the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosHigh.setDescription("The 8-bit high value of a range of TOS byte\nvalues.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the\nvalue of 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet as defined by the DOCSIS Specification\nfor packet classification.")
docsIetfQosPktClassIpTosMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpTosMask.setDescription("The mask value is bitwise ANDed with TOS byte\nin an IP packet, and this value is used for\nrange checking of TosLow and TosHigh.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). This object is defined as an 8-bit\noctet per the DOCSIS Specification for packet\nclassification.")
docsIetfQosPktClassIpProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 258))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassIpProtocol.setDescription("This object indicates the value of the IP\nProtocol field required for IP packets to match\nthis rule.\n\n\n\n\nThe value 256 matches traffic with any IP Protocol\nvalue. The value 257 by convention matches both TCP\nand UDP.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 258.")
docsIetfQosPktClassInetAddressType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 8), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetAddressType.setDescription("The type of the Internet address for\ndocsIetfQosPktClassInetSourceAddr,\ndocsIetfQosPktClassInetSourceMask,\ndocsIetfQosPktClassInetDestAddr, and\ndocsIetfQosPktClassInetDestMask.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\nipv4(1).")
docsIetfQosPktClassInetSourceAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 9), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceAddr.setDescription("This object specifies the value of the IP\nSource Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Source Address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetSourceMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 10), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetSourceMask.setDescription("This object specifies which bits of a packet's\nIP Source Address are compared to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nsource address bitwise ANDed with the\ndocsIetfQosPktClassInetSourceMask value equals the\ndocsIetfQosIpPktClassInetSourceAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassInetDestAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 11), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestAddr.setDescription("This object specifies the value of the IP\nDestination Address required for packets to match\nthis rule.\n\nAn IP packet matches the rule when the packet\nIP Destination Address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value\nequals the docsIetfQosPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'00000000'H.")
docsIetfQosPktClassInetDestMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 12), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassInetDestMask.setDescription("This object specifies which bits of a packet's\nIP Destination Address are compared to\nmatch this rule.\n\nAn IP packet matches the rule when the packet\ndestination address bitwise ANDed with the\ndocsIetfQosPktClassInetDestMask value equals the\ndocsIetfQosIpPktClassInetDestAddr value.\n\nThe address type of this object is specified by\ndocsIetfQosPktClassInetAddressType.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFF'H.")
docsIetfQosPktClassSourcePortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 13), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassSourcePortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 14), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourcePortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP source port numbers to which\na packet is compared. This object is irrelevant\nfor non-TCP/UDP IP packets.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestPortStart = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 15), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortStart.setDescription("This object specifies the low-end inclusive\nrange of TCP/UDP destination port numbers to\nwhich a packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value\nof 0.")
docsIetfQosPktClassDestPortEnd = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 16), InetPortNumber()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestPortEnd.setDescription("This object specifies the high-end inclusive\nrange of TCP/UDP destination port numbers to which\na packet is compared.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n65535.")
docsIetfQosPktClassDestMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 17), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacAddr.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassDestMacMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 18), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassDestMacMask.setDescription("An Ethernet packet matches an entry when its\ndestination MAC address bitwise ANDed with\ndocsIetfQosPktClassDestMacMask equals the value of\ndocsIetfQosPktClassDestMacAddr.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'000000000000'H.")
docsIetfQosPktClassSourceMacAddr = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 19), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassSourceMacAddr.setDescription("An Ethernet packet matches this entry when its\nsource MAC address equals the value of\nthis object.\n\nIf the referenced parameter is not present\nin a classifier, this object reports the value of\n'FFFFFFFFFFFF'H.")
docsIetfQosPktClassEnetProtocolType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(2,0,1,4,3,)).subtype(namedValues=NamedValues(("none", 0), ("ethertype", 1), ("dsap", 2), ("mac", 3), ("all", 4), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocolType.setDescription("This object indicates the format of the layer 3\nprotocol ID in the Ethernet packet. A value of\nnone(0) means that the rule does not use the\nlayer 3 protocol type as a matching criteria.\n\nA value of ethertype(1) means that the rule\napplies only to frames that contain an\nEtherType value. Ethertype values are contained\nin packets using the Dec-Intel-Xerox (DIX)\nencapsulation or the RFC1042 Sub-Network Access\nProtocol (SNAP) encapsulation formats.\n\nA value of dsap(2) means that the rule applies\n\n\n\nonly to frames using the IEEE802.3\nencapsulation format with a Destination Service\nAccess Point (DSAP) other\nthan 0xAA (which is reserved for SNAP).\n\nA value of mac(3) means that the rule applies\nonly to MAC management messages for MAC management\nmessages.\n\nA value of all(4) means that the rule matches\nall Ethernet packets.\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object\napplies to the embedded EtherType field within\nthe 802.1P/Q header.\n\nIf the referenced parameter is not present in a\nclassifier, this object reports the value of 0.")
docsIetfQosPktClassEnetProtocol = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 21), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassEnetProtocol.setDescription("If docsIetfQosEthPktClassProtocolType is none(0),\nthis object is ignored when considering whether\na packet matches the current rule.\n\nIf dosQosPktClassEnetProtocolType is ethertype(1),\nthis object gives the 16-bit value of the\nEtherType that the packet must match in order to\nmatch the rule.\n\nIf docsIetfQosPktClassEnetProtocolType is dsap(2),\nthe lower 8 bits of this object's value must match\nthe DSAP byte of the packet in order to match the\nrule.\n\nIf docsIetfQosPktClassEnetProtocolType is mac(3),\nthe lower 8 bits of this object's value represent a\nlower bound (inclusive) of MAC management message\ntype codes matched, and the upper 8 bits represent\nthe upper bound (inclusive) of matched MAC message\ntype codes. Certain message type codes are\nexcluded from matching, as specified in the\nreference.\n\n\n\nIf the Ethernet frame contains an 802.1P/Q Tag\nheader (i.e., EtherType 0x8100), this object applies\nto the embedded EtherType field within the 802.1P/Q\nheader.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriLow = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 22), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriLow.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassUserPriHigh = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassUserPriHigh.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Qtag header (indicated with\nEtherType 0x8100). Such frames include a 16-bit\nTag that contains a 3-bit Priority field and\na 12-bit VLAN number.\n\nTagged Ethernet packets must have a 3-bit\nPriority field within the range of\ndocsIetfQosPktClassPriLow to\ndocsIetfQosPktClassPriHigh in order to match this\nrule.\n\n\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 7.")
docsIetfQosPktClassVlanId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 24), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4094))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassVlanId.setDescription("This object applies only to Ethernet frames\nusing the 802.1P/Q tag header.\n\nTagged packets must have a VLAN Identifier that\nmatches the value in order to match the rule.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas 0.")
docsIetfQosPktClassStateActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 25), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassStateActive.setDescription("This object indicates whether or not the classifier\nis enabled to classify packets to a Service Flow.\n\nIf the referenced parameter is not present in the\nclassifier, the value of this object is reported\nas true(1).")
docsIetfQosPktClassPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 26), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassPkts.setDescription("This object counts the number of packets that have\nbeen classified using this entry. This\nincludes all packets delivered to a Service Flow\nmaximum rate policing function, whether or not that\nfunction drops the packets.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosPktClassBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 1, 1, 27), Bits().subtype(namedValues=NamedValues(("rulePriority", 0), ("activationState", 1), ("destPortStart", 10), ("destPortEnd", 11), ("destMac", 12), ("sourceMac", 13), ("ethertype", 14), ("userPri", 15), ("vlanId", 16), ("ipTos", 2), ("ipProtocol", 3), ("ipSourceAddr", 4), ("ipSourceMask", 5), ("ipDestAddr", 6), ("ipDestMask", 7), ("sourcePortStart", 8), ("sourcePortEnd", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPktClassBitMap.setDescription("This object indicates which parameter encodings\nwere actually present in the DOCSIS packet\nclassifier encoding signaled in the DOCSIS message\nthat created or modified the classifier. Note that\nDynamic Service Change messages have replace\nsemantics, so that all non-default parameters must\nbe present whether the classifier is being created\nor changed.\n\nA bit of this object is set to 1 if the parameter\nindicated by the comment was present in the\nclassifier encoding, and to 0 otherwise.\n\nNote that BITS are encoded most significant bit\nfirst, so that if, for example, bits 6 and 7 are\nset, this object is encoded as the octet string\n'030000'H.")
docsIetfQosParamSetTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 2))
if mibBuilder.loadTexts: docsIetfQosParamSetTable.setDescription("This table describes the set of DOCSIS 1.1 and 2.0\nQOS parameters defined in a managed device.\n\nThe ifIndex index specifies a DOCSIS MAC Domain.\nThe docsIetfQosServiceFlowId index specifies a\nparticular Service Flow.\nThe docsIetfQosParamSetType index indicates whether\nthe active, admitted, or provisioned QOS Parameter\nSet is being described by the row.\n\nOnly the QOS Parameter Sets of DOCSIS 1.1 and 2.0\nService Flows are represented in this table.\n\nDOCSIS 1.0 QOS service profiles are not\nrepresented in this table.\n\nEach row corresponds to a DOCSIS QOS Parameter Set\nas signaled via DOCSIS MAC management messages.\nEach object in the row corresponds to one or\npart of one DOCSIS 1.1 Service Flow Encoding.\nThe docsIetfQosParamSetBitMap object in the row\nindicates which particular parameters were signaled\nin the original registration or dynamic service\nrequest message that created the QOS Parameter Set.\n\nIn many cases, even if a QOS Parameter Set parameter\nwas not signaled, the DOCSIS specification calls\nfor a default value to be used. That default value\nis reported as the value of the corresponding object\nin this row.\n\nMany objects are not applicable, depending on\nthe Service Flow direction or upstream scheduling\ntype. The object value reported in this case\nis specified in the DESCRIPTION clause.")
docsIetfQosParamSetEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 2, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosParamSetType"))
if mibBuilder.loadTexts: docsIetfQosParamSetEntry.setDescription("A unique set of QOS parameters.")
docsIetfQosParamSetServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetServiceClassName.setDescription("Refers to the Service Class Name from which the\nparameter set values were derived.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is a zero-length string.")
docsIetfQosParamSetPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetPriority.setDescription("The relative priority of a Service Flow.\nHigher numbers indicate higher priority.\nThis priority should only be used to differentiate\n\n\n\nService Flow from identical parameter sets.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, the reported value is 0.")
docsIetfQosParamSetMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 3), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficRate.setDescription("Maximum sustained traffic rate allowed for this\nService Flow in bits/sec. Must count all MAC frame\ndata PDU from the bytes following the MAC header\nHCS to the end of the CRC. The number of bytes\nforwarded is limited during any time interval.\nThe value 0 means no maximum traffic rate is\nenforced. This object applies to both upstream and\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter is\nnot applicable, it is reported as 0.")
docsIetfQosParamSetMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 4), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxTrafficBurst.setDescription("Specifies the token bucket size in bytes\nfor this parameter set. The value is calculated\nfrom the byte following the MAC header HCS to\nthe end of the CRC. This object is applied in\nconjunction with docsIetfQosParamSetMaxTrafficRate\nto calculate maximum sustained traffic rate.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort (2), nonRealTimePollingService(3),\nand realTimePollingService(4) is 3044.\n\nIf this parameter is not applicable, it is reported\nas 0.")
docsIetfQosParamSetMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 5), DocsIetfQosBitRate()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedRate.setDescription("Specifies the guaranteed minimum rate in\nbits/sec for this parameter set. The value is\ncalculated from the byte following the MAC\nheader HCS to the end of the CRC. The default\nvalue of 0 means that no bandwidth is reserved.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0. If the parameter\nis not applicable, it is reported as 0.")
docsIetfQosParamSetMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMinReservedPkt.setDescription("Specifies an assumed minimum packet size in\nbytes for which the\ndocsIetfQosParamSetMinReservedRate will be\nprovided. The value is calculated from the byte\nfollowing the MAC header HCS to the end of the\nCRC.\n\nIf the referenced parameter is omitted from a\nDOCSIS QOS parameter set, the default value is\nCMTS implementation dependent. In this case, the\nCMTS reports the default value it is using, and the\nCM reports a value of 0. If the referenced\nparameter is not applicable to the direction or\nscheduling type of the Service Flow, both CMTS and\nCM report this object's value as 0.")
docsIetfQosParamSetActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetActiveTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain unused on an active service\nflow before CMTS signals that both active and\nadmitted parameters set are null. The default\nvalue of 0 signifies an infinite amount of time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object is 0.")
docsIetfQosParamSetAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetAdmittedTimeout.setDescription("Specifies the maximum duration in seconds that\nresources remain in admitted state before\nresources must be released.\n\nThe value of 0 signifies an infinite amount\nof time.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the\ndefault value of this object is 200.")
docsIetfQosParamSetMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxConcatBurst.setDescription("Specifies the maximum concatenated burst in\nbytes that an upstream Service Flow is allowed.\nThe value is calculated from the FC byte of the\nConcatenation MAC Header to the last CRC byte in\nof the last concatenated MAC frame, inclusive.\nThe value of 0 specifies no maximum burst.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set, the default\nvalue of this object for scheduling types\nbestEffort(2), nonRealTimePollingService(3), and\n\n\n\nrealTimePollingService(4) is 1522. If the parameter\nis not applicable, this object's value is reported\nas 0.")
docsIetfQosParamSetSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 10), DocsIetfQosSchedulingType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetSchedulingType.setDescription("Specifies the upstream scheduling service used for\nupstream Service Flow.\n\nIf the referenced parameter is not present in the\ncorresponding DOCSIS QOS Parameter Set of an\nupstream Service Flow, the default value of this\nobject is bestEffort(2). For QOS parameter sets of\ndownstream Service Flows, this object's value is\nreported as undefined(1).")
docsIetfQosParamSetNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 11), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomPollInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive unicast request\nopportunities on an upstream Service Flow.\n\nThis object applies only to upstream Service Flows\nwith DocsIetfQosSchedulingType of value\nnonRealTimePollingService(3),\nrealTimePollingService(4), and\nunsolicitedGrantServiceWithAD(5). The parameter is\nmandatory for realTimePollingService(4). If the\nparameter is omitted with\nnonRealTimePollingService(3), the CMTS uses an\nimplementation-dependent value. If the parameter\nis omitted with unsolicitedGrantServiceWithAD(5),\nthe CMTS uses as a default value the value of the\nNominal Grant Interval parameter. In all cases,\nthe CMTS reports the value it is using when the\nparameter is applicable. The CM reports the\nsignaled parameter value if it was signaled,\nand 0 otherwise.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolPollJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the unicast request interval\nmay be delayed from the nominal periodic\nschedule on an upstream Service Flow.\n\nThis parameter is applicable only to upstream\nService Flows with a DocsIetfQosSchedulingType of\nrealTimePollingService(4) or\nunsolicitedGrantServiceWithAD(5).\n\nIf the referenced parameter is applicable but not\npresent in the corresponding DOCSIS QOS Parameter\nSet, the CMTS uses an implementation-dependent\nvalue and reports the value it is using.\nThe CM reports a value of 0 in this case.\n\nIf the parameter is not applicable to the\ndirection or upstream scheduling type of the\nService Flow, both CMTS and CM report this\nobject's value as 0.")
docsIetfQosParamSetUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetUnsolicitGrantSize.setDescription("Specifies the unsolicited grant size in bytes.\nThe grant size includes the entire MAC frame\ndata PDU from the Frame Control byte to the end\nof the MAC frame.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServiceWithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report\nthe signaled value of the parameter in this\ncase.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 14), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetNomGrantInterval.setDescription("Specifies the nominal interval in microseconds\nbetween successive data grant opportunities\non an upstream Service Flow.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServiceWithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 15), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTolGrantJitter.setDescription("Specifies the maximum amount of time in\nmicroseconds that the transmission opportunities\nmay be delayed from the nominal periodic schedule.\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServiceWithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetGrantsPerInterval.setDescription("Specifies the number of data grants per Nominal\nGrant Interval\n(docsIetfQosParamSetNomGrantInterval).\n\nThe referenced parameter is applicable only\nfor upstream flows with a DocsIetfQosSchedulingType\nof unsolicitedGrantServiceWithAD(5) or\nunsolicitedGrantService(6), and it is mandatory\nwhen applicable. Both CMTS and CM report the\nsignaled value of the parameter in this case.\n\nIf the referenced parameter is not applicable to\nthe direction or scheduling type of the\ncorresponding DOCSIS QOS Parameter Set, both\nCMTS and CM report this object's value as 0.")
docsIetfQosParamSetTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 17), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosAndMask.setDescription("Specifies the AND mask for the IP TOS byte for\noverwriting IP packet's TOS value. The IP packet\nTOS byte is bitwise ANDed with\ndocsIetfQosParamSetTosAndMask, and the result is\nbitwise ORed with docsIetfQosParamSetTosOrMask and\nthe result is written to the IP packet TOS byte.\nA value of 'FF'H for docsIetfQosParamSetTosAndMask\nand a value of '00'H for\ndocsIetfQosParamSetTosOrMask means that the IP\nPacket TOS byte is not overwritten.\n\nThis combination is reported if the referenced\nparameter is not present in a QOS Parameter Set.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosParamSetTosAndMask that have either of\nthe least-significant two bits set to 0. Similarly,\noperators should not use values of\ndocsIetfQosParamSetTosOrMask that have either of\nthe least-significant two bits set to 1.\n\nEven though this object is only enforced by the\nCable Modem Termination System (CMTS),\nCable Modems MUST report the value as signaled in\nthe referenced parameter.")
docsIetfQosParamSetTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 18), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetTosOrMask.setDescription("Specifies the OR mask for the IP TOS byte.\n\nSee the description of docsIetfQosParamSetTosAndMask\nfor further details.\n\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of docsIetfQosParamSetTosAndMask\nand docsIetfQosParamSetTosOrMask that would result\nin the modification of the ECN bits.")
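
# --- Illustrative sketch (not part of the generated MIB code) ---
# The two objects above define the TOS overwrite rule: the packet's TOS
# byte is bitwise ANDed with docsIetfQosParamSetTosAndMask, then the
# result is bitwise ORed with docsIetfQosParamSetTosOrMask. A minimal
# helper; the name is hypothetical and all arguments are ints 0..255.
def _example_tos_overwrite(tos_byte, and_mask, or_mask):
    """Apply the AND/OR overwrite rule described above."""
    return (tos_byte & and_mask) | or_mask
# e.g. _example_tos_overwrite(0xB8, 0xFF, 0x00) == 0xB8 (the 'FF'H/'00'H
# combination leaves the TOS byte untouched).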
docsIetfQosParamSetMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 19), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetMaxLatency.setDescription("Specifies the maximum latency between the\nreception of a packet by the CMTS on its NSI\nand the forwarding of the packet to the RF\ninterface. A value of 0 signifies no maximum\nlatency is enforced. This object only applies to\ndownstream Service Flows.\n\nIf the referenced parameter is not present in the\ncorresponding downstream DOCSIS QOS Parameter Set,\nthe default value is 0. This parameter is\nnot applicable to upstream DOCSIS QOS Parameter\nSets, and its value is reported as 0 in this case.")
docsIetfQosParamSetType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 20), Integer().subtype(subtypeSpec=SingleValueConstraint(1,3,2,)).subtype(namedValues=NamedValues(("active", 1), ("admitted", 2), ("provisioned", 3), ))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosParamSetType.setDescription("Defines the type of the QOS parameter set defined\nby this row. active(1) indicates the Active QOS\nparameter set, describing the service currently\nbeing provided by the DOCSIS MAC domain to the\nService Flow. admitted(2) indicates the Admitted\nQOS Parameter Set, describing services reserved by\nthe DOCSIS MAC domain for use by the service\nflow. provisioned(3) describes the QOS Parameter\nSet defined in the DOCSIS CM Configuration file for\nthe Service Flow.")
docsIetfQosParamSetRequestPolicyOct = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetRequestPolicyOct.setDescription("Specifies which transmit interval opportunities\nthe CM omits for upstream transmission requests and\npacket transmissions. This object takes its\ndefault value for downstream Service Flows.\n\nUnless otherwise indicated, a bit value of 1 means\nthat a CM must not use that opportunity for\nupstream transmission.\n\nIf bit 0 is the least significant bit of the\nleast significant (4th) octet, and if the bit number\nincreases with significance, the bit definitions\nare defined as follows:\n\nbroadcastReqOpp(0):\n all CMs broadcast request opportunities\n\npriorityReqMulticastReq(1):\n priority request multicast request\n opportunities\n\nreqDataForReq(2):\n request/data opportunities for requests\n\nreqDataForData(3):\n request/data opportunities for data\n\npiggybackReqWithData(4):\n piggyback requests with data\n\nconcatenateData(5):\n concatenate data\n\nfragmentData(6):\n fragment data\n\nsuppressPayloadHeaders(7):\n suppress payload headers\n\ndropPktsExceedUGSize(8):\n A value of 1 means that the Service Flow must\n drop packets that do not fit in the Unsolicited\n Grant size.\n\nIf the referenced parameter is not present in\na QOS Parameter Set, the value of this object is\nreported as '00000000'H.")
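
# --- Illustrative sketch (not part of the generated MIB code) ---
# Per the description above, bit 0 of the request policy is the least
# significant bit of the least significant (4th) octet, so the 4-octet
# value can be treated as one big-endian integer. Helper name is
# hypothetical.
def _example_request_policy_bit(policy_octets, bit):
    """Return True if the given policy bit (0..31) is set."""
    return bool((int.from_bytes(policy_octets, 'big') >> bit) & 1)
# e.g. _example_request_policy_bit(b'\x00\x00\x01\x00', 8) is True,
# i.e. dropPktsExceedUGSize(8) is asserted.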
docsIetfQosParamSetBitMap = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 2, 1, 22), Bits().subtype(namedValues=NamedValues(("trafficPriority", 0), ("maxTrafficRate", 1), ("nomPollInterval", 10), ("tolPollJitter", 11), ("unsolicitGrantSize", 12), ("nomGrantInterval", 13), ("tolGrantJitter", 14), ("grantsPerInterval", 15), ("tosOverwrite", 16), ("maxLatency", 17), ("maxTrafficBurst", 2), ("minReservedRate", 3), ("minReservedPkt", 4), ("activeTimeout", 5), ("admittedTimeout", 6), ("maxConcatBurst", 7), ("schedulingType", 8), ("requestPolicy", 9), ))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosParamSetBitMap.setDescription("This object indicates the set of QOS Parameter\nSet parameters actually signaled in the\nDOCSIS registration or dynamic service request\nmessage that created or modified the QOS Parameter\nSet. A bit is set to 1 when the parameter described\nby the indicated reference section is present\nin the original request.\n\nNote that when Service Class names are expanded,\nthe registration or dynamic response message may\ncontain parameters as expanded by the CMTS based\non a stored service class. These expanded\nparameters are not indicated by a 1 bit in this\nobject.\n\nNote that even though some QOS Parameter Set\nparameters may not be signaled in a message\n(so that the parameter's bit in this object is 0),\nthe DOCSIS specification requires that default\nvalues be used. These default values are reported\nas the corresponding object's value in the row.\n\nNote that BITS objects are encoded most\nsignificant bit first. For example, if bits\n1 and 16 are set, the value of this object\nis the octet string '400080'H.")
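
# --- Illustrative sketch (not part of the generated MIB code) ---
# BITS values such as docsIetfQosParamSetBitMap are encoded most
# significant bit first: named bit i lives in octet i // 8 at bit
# position 7 - (i % 8). A minimal decoder, consistent with the
# '400080'H example above (bits 1 and 16 set):
def _example_bits_is_set(octets, bit):
    """Return True if named bit `bit` is set in a BITS octet string."""
    octet = bit // 8
    if octet >= len(octets):
        return False
    return bool(octets[octet] & (0x80 >> (bit % 8)))
# _example_bits_is_set(bytes.fromhex('400080'), 1) and
# _example_bits_is_set(bytes.fromhex('400080'), 16) are both True.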
docsIetfQosServiceFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 3))
if mibBuilder.loadTexts: docsIetfQosServiceFlowTable.setDescription("This table describes the set of DOCSIS-QOS\nService Flows in a managed device.")
docsIetfQosServiceFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 3, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowEntry.setDescription("Describes a Service Flow.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowId.setDescription("An index assigned to a Service Flow by CMTS.")
docsIetfQosServiceFlowSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 16383))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowSID.setDescription("Service Identifier (SID) assigned to an\nadmitted or active Service Flow. This object\nreports a value of 0 if a Service ID is not\nassociated with the Service Flow. Only active\nor admitted upstream Service Flows will have a\nService ID (SID).")
docsIetfQosServiceFlowDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 3), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowDirection.setDescription("The direction of the Service Flow.")
docsIetfQosServiceFlowPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 3, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPrimary.setDescription("This object reflects whether the Service Flow is\nthe primary or a secondary Service Flow.\n\nA primary Service Flow is the default Service Flow\nfor otherwise unclassified traffic and all MAC\nmessages.")
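
# --- Illustrative sketch (not part of the generated MIB code) ---
# One way a manager might enumerate the service flow table defined
# above, using the pysnmp high-level API. The host and community
# string are placeholders, and error handling is minimal; this assumes
# DOCS-IETF-QOS-MIB is resolvable by the manager's MIB builder.
def _example_walk_service_flows(host, community='public'):
    """Yield (OID, value) pairs for docsIetfQosServiceFlowSID."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    for (err_indication, err_status, err_index, var_binds) in nextCmd(
            SnmpEngine(), CommunityData(community),
            UdpTransportTarget((host, 161)), ContextData(),
            ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                      'docsIetfQosServiceFlowSID')),
            lexicographicMode=False):
        if err_indication or err_status:
            break
        for oid, value in var_binds:
            yield oid, value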
docsIetfQosServiceFlowStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 4))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsTable.setDescription("This table describes statistics associated with the\nService Flows in a managed device.")
docsIetfQosServiceFlowStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 4, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowStatsEntry.setDescription("Describes a set of Service Flow statistics.\nAn entry in the table exists for each\nService Flow ID. The ifIndex is an\nifType of docsCableMaclayer(127).")
docsIetfQosServiceFlowPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 1), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPkts.setDescription("For outgoing Service Flows, this object counts the\nnumber of Packet Data PDUs forwarded to this\nService Flow. For incoming upstream CMTS service\nflows, this object counts the number of Packet\nData PDUs actually received on the Service Flow\nidentified by the SID for which the packet was\nscheduled. CMs not classifying downstream packets\nmay report this object's value as 0 for downstream\nService Flows. This object does not count\nMAC-specific management messages.\n\nParticularly for UGS flows, packets sent on the\nprimary Service Flow in violation of the UGS grant\nsize should be counted only by the instance of this\nobject that is associated with the primary service\nflow.\n\nUnclassified upstream user data packets (i.e., non-\nMAC-management) forwarded to the primary upstream\nService Flow should be counted by the instance of\nthis object that is associated with the primary\nservice flow.\n\nThis object does include packets counted by\ndocsIetfQosServiceFlowPolicedDelayPkts, but does not\ninclude packets counted by\ndocsIetfQosServiceFlowPolicedDropPkts\nand docsIetfQosServiceFlowPHSUnknowns.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 2), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowOctets.setDescription("The number of octets from the byte after the MAC\nheader HCS to the end of the CRC for all packets\ncounted in the docsIetfQosServiceFlowPkts object for\nthis row. Note that this counts the octets after\npayload header suppression and before payload\nheader expansion have been applied.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 3), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowTimeActive.setDescription("The number of seconds that the service flow\nhas been active.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPHSUnknowns = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPHSUnknowns.setDescription("For incoming upstream CMTS service flows, this\nobject counts the number of packets received\nwith an unknown payload header suppression index.\nThe service flow is identified by the SID for which\nthe packet was scheduled.\n\nOn a CM, only this object's instance for the primary\ndownstream service flow counts packets received with\nan unknown payload header suppression index. All\nother downstream service flows on the CM report this\nobject's value as 0.\n\nAll outgoing service flows report this object's\nvalue as 0.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDropPkts.setDescription("For outgoing service flows, this object counts the\nnumber of Packet Data PDUs classified to this\nservice flow dropped due to:\n (1) implementation-dependent excessive delay\n while enforcing the Maximum Sustained\n Traffic Rate; or\n (2) UGS packets dropped due to exceeding the\n Unsolicited Grant Size with a\n Request/Transmission policy that requires\n such packets to be dropped.\n\nClassified packets dropped due to other reasons\nmust be counted in ifOutDiscards for the interface\nof this service flow. This object reports 0 for\nincoming service flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosServiceFlowPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 4, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowPolicedDelayPkts.setDescription("This object counts only outgoing packets delayed in\norder to maintain the Maximum Sustained Traffic\nRate. This object will always report a value of 0\nfor UGS flows because the Maximum Sustained Traffic\nRate does not apply. This object is 0 for incoming\nservice flows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
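
# --- Illustrative sketch (not part of the generated MIB code) ---
# The stats columns above allow a crude average-throughput estimate:
# octets counted over the seconds the flow has been active. This sketch
# ignores counter wraps and discontinuities (see the
# ifCounterDiscontinuityTime notes above); the helper name is
# hypothetical.
def _example_avg_rate_bps(flow_octets, time_active_seconds):
    """Average rate in bits/sec over the flow's active lifetime."""
    if not time_active_seconds:
        return 0.0
    return flow_octets * 8.0 / time_active_seconds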
docsIetfQosUpstreamStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 5))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsTable.setDescription("This table describes statistics associated with\nupstream service flows. All counted frames must\nbe received without a Frame Check Sequence (FCS)\nerror.")
docsIetfQosUpstreamStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 5, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosSID"))
if mibBuilder.loadTexts: docsIetfQosUpstreamStatsEntry.setDescription("Describes a set of upstream service flow\nstatistics. An entry in the table exists for each\nupstream Service Flow in a managed device.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosSID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 16383))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosSID.setDescription("Identifies a service ID for an admitted or active\nupstream service flow.")
docsIetfQosUpstreamFragments = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragments.setDescription("The number of fragmentation headers received on an\nupstream service flow, regardless of whether\nthe fragment was correctly reassembled into a\nvalid packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamFragDiscards = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamFragDiscards.setDescription("The number of upstream fragments discarded and not\nassembled into a valid upstream packet.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosUpstreamConcatBursts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosUpstreamConcatBursts.setDescription("The number of concatenation headers received on an\nupstream service flow.\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicServiceStatsTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 6))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsTable.setDescription("This table describes statistics associated with the\nDynamic Service Flows in a managed device.")
docsIetfQosDynamicServiceStatsEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 6, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosIfDirection"))
if mibBuilder.loadTexts: docsIetfQosDynamicServiceStatsEntry.setDescription("Describes a set of dynamic service flow statistics.\nTwo entries exist for each DOCSIS MAC layer\ninterface for the upstream and downstream\ndirection. On the CMTS, the downstream direction\nrow indicates messages transmitted or transactions\noriginated by the CMTS. The upstream direction row\nindicates messages received or transactions\noriginated by the CM. On the CM, the downstream\ndirection row indicates messages received or\ntransactions originated by the CMTS. The upstream\ndirection row indicates messages transmitted by\nthe CM or transactions originated by the CM.\nThe ifIndex is an ifType of\ndocsCableMaclayer(127).")
docsIetfQosIfDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 1), DocsIetfQosRfMacIfDirection()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosIfDirection.setDescription("The direction of interface.")
docsIetfQosDSAReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAReqs.setDescription("The number of Dynamic Service Addition Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSARsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSARsps.setDescription("The number of Dynamic Service Addition Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSAAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSAAcks.setDescription("The number of Dynamic Service Addition\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCReqs.setDescription("The number of Dynamic Service Change Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCRsps.setDescription("The number of Dynamic Service Change Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSCAcks.setDescription("The number of Dynamic Service Change\nAcknowledgements, including retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSDReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDReqs.setDescription("The number of Dynamic Service Delete Requests,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDSDRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDSDRsps.setDescription("The number of Dynamic Service Delete Responses,\nincluding retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAdds = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAdds.setDescription("The number of successful Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicAddFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicAddFails.setDescription("The number of failed Dynamic Service Addition\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicChanges = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChanges.setDescription("The number of successful Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicChangeFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicChangeFails.setDescription("The number of failed Dynamic Service Change\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeletes = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeletes.setDescription("The number of successful Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDynamicDeleteFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDynamicDeleteFails.setDescription("The number of failed Dynamic Service Delete\ntransactions.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCReqs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCReqs.setDescription("The number of Dynamic Channel Change Request\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex\nthat indexes this object.")
docsIetfQosDCCRsps = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCRsps.setDescription("The number of Dynamic Channel Change Response\nmessages traversing an interface. This count is\nnonzero only on upstream direction rows. This count\nshould include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCAcks = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCAcks.setDescription("The number of Dynamic Channel Change Acknowledgement\nmessages traversing an interface. This count\nis nonzero only on downstream direction rows.\nThis count should include the number of retries.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCs = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCs.setDescription("The number of successful Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
docsIetfQosDCCFails = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 6, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosDCCFails.setDescription("The number of failed Dynamic Channel Change\ntransactions. This count is nonzero only on\ndownstream direction rows.\n\nThis counter's last discontinuity is the\nifCounterDiscontinuityTime for the same ifIndex that\nindexes this object.")
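
# --- Illustrative sketch (not part of the generated MIB code) ---
# The paired success/failure counters above (docsIetfQosDynamicAdds vs.
# docsIetfQosDynamicAddFails, and likewise for changes, deletes, and
# DCC transactions) make failure-ratio monitoring straightforward; a
# hypothetical helper:
def _example_failure_ratio(successes, failures):
    """Fraction of failed transactions, or 0.0 when none were attempted."""
    total = successes + failures
    return failures / total if total else 0.0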
docsIetfQosServiceFlowLogTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 7))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTable.setDescription("This table contains a log of the disconnected\nService Flows in a managed device.")
docsIetfQosServiceFlowLogEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 7, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogEntry.setDescription("The information regarding a single disconnected\nservice flow.")
docsIetfQosServiceFlowLogIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIndex.setDescription("Unique index for a logged service flow.")
docsIetfQosServiceFlowLogIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 2), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogIfIndex.setDescription("The ifIndex of ifType docsCableMaclayer(127)\non the CMTS where the service flow was present.")
docsIetfQosServiceFlowLogSFID = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogSFID.setDescription("The index assigned to the service flow by the CMTS.")
docsIetfQosServiceFlowLogCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 4), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogCmMac.setDescription("The MAC address for the cable modem associated with\nthe service flow.")
docsIetfQosServiceFlowLogPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 5), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPkts.setDescription("The number of packets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogOctets = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 6), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogOctets.setDescription("The number of octets counted on this service flow\nafter payload header suppression.")
docsIetfQosServiceFlowLogTimeDeleted = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 7), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeDeleted.setDescription("The value of sysUpTime when the service flow\nwas deleted.")
docsIetfQosServiceFlowLogTimeCreated = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 8), TimeStamp()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeCreated.setDescription("The value of sysUpTime when the service flow\nwas created.")
docsIetfQosServiceFlowLogTimeActive = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogTimeActive.setDescription("The total time that the service flow was active.")
docsIetfQosServiceFlowLogDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 10), DocsIetfQosRfMacIfDirection()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogDirection.setDescription("The value of docsIetfQosServiceFlowDirection\nfor the service flow.")
docsIetfQosServiceFlowLogPrimary = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 11), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPrimary.setDescription("The value of docsIetfQosServiceFlowPrimary for the\nservice flow.")
docsIetfQosServiceFlowLogServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 12), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogServiceClassName.setDescription("The value of docsIetfQosParamSetServiceClassName for\nthe provisioned QOS Parameter Set of the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDropPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDropPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDropPkts for the\nservice flow.")
docsIetfQosServiceFlowLogPolicedDelayPkts = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogPolicedDelayPkts.setDescription("The final value of\ndocsIetfQosServiceFlowPolicedDelayPkts for the\nservice flow.")
docsIetfQosServiceFlowLogControl = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 7, 1, 15), Integer().subtype(subtypeSpec=SingleValueConstraint(1,6,)).subtype(namedValues=NamedValues(("active", 1), ("destroy", 6), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: docsIetfQosServiceFlowLogControl.setDescription("Setting this object to the value destroy(6) removes\nthis entry from the table.\n\nReading this object returns the value active(1).")
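
# --- Illustrative sketch (not part of the generated MIB code) ---
# Removing a service flow log row, as described above, means writing
# destroy(6) to docsIetfQosServiceFlowLogControl for that row's index.
# Host and community strings are placeholders; this sketch assumes the
# pysnmp high-level API with DOCS-IETF-QOS-MIB resolvable.
def _example_destroy_log_entry(host, log_index, community='private'):
    """Send one SET of destroy(6) and return the pysnmp result tuple."""
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity,
                              Integer32, setCmd)
    return next(setCmd(
        SnmpEngine(), CommunityData(community),
        UdpTransportTarget((host, 161)), ContextData(),
        ObjectType(ObjectIdentity('DOCS-IETF-QOS-MIB',
                                  'docsIetfQosServiceFlowLogControl',
                                  log_index),
                   Integer32(6))))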
docsIetfQosServiceClassTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 8))
if mibBuilder.loadTexts: docsIetfQosServiceClassTable.setDescription("This table describes the set of DOCSIS-QOS\nService Classes in a CMTS.")
docsIetfQosServiceClassEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 8, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassName"))
if mibBuilder.loadTexts: docsIetfQosServiceClassEntry.setDescription("A provisioned service class on a CMTS.\nEach entry defines a template for certain\nDOCSIS QOS Parameter Set values. When a CM\ncreates or modifies an Admitted QOS Parameter Set\nfor a Service Flow, it may reference a Service Class\nName instead of providing explicit QOS Parameter\nSet values. In this case, the CMTS populates\nthe QOS Parameter Set with the applicable\ncorresponding values from the named Service Class.\nSubsequent changes to a Service Class row do not\naffect the QOS Parameter Set values of any service\nflows already admitted.\n\nA service class template applies to only\na single direction, as indicated in the\ndocsIetfQosServiceClassDirection object.")
docsIetfQosServiceClassName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 1), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(1, 15))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassName.setDescription("Service Class Name. DOCSIS specifies that the\nmaximum size is 16 ASCII characters including\na terminating zero. The terminating zero is not\nrepresented in this SnmpAdminString syntax object.")
docsIetfQosServiceClassStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStatus.setDescription("Used to create or delete rows in this table.\nThere is no restriction on the ability to change\nvalues in this row while the row is active.\nInactive rows need not be timed out.")
docsIetfQosServiceClassPriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 7)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPriority.setDescription("Template for docsIetfQosParamSetPriority.")
docsIetfQosServiceClassMaxTrafficRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 4), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficRate.setDescription("Template for docsIetfQosParamSetMaxTrafficRate.")
docsIetfQosServiceClassMaxTrafficBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 5), Unsigned32().clone(3044)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxTrafficBurst.setDescription("Template for docsIetfQosParamSetMaxTrafficBurst.")
docsIetfQosServiceClassMinReservedRate = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 6), DocsIetfQosBitRate().clone('0')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedRate.setDescription("Template for docsIetfQosParamSetMinReservedRate.")
docsIetfQosServiceClassMinReservedPkt = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMinReservedPkt.setDescription("Template for docsIetfQosParamSetMinReservedPkt.")
docsIetfQosServiceClassMaxConcatBurst = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(1522)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxConcatBurst.setDescription("Template for docsIetfQosParamSetMaxConcatBurst.")
docsIetfQosServiceClassNomPollInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 9), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomPollInterval.setDescription("Template for docsIetfQosParamSetNomPollInterval.")
docsIetfQosServiceClassTolPollJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 10), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolPollJitter.setDescription("Template for docsIetfQosParamSetTolPollJitter.")
docsIetfQosServiceClassUnsolicitGrantSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassUnsolicitGrantSize.setDescription("Template for docsIetfQosParamSetUnsolicitGrantSize.")
docsIetfQosServiceClassNomGrantInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 12), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassNomGrantInterval.setDescription("Template for docsIetfQosParamSetNomGrantInterval.")
docsIetfQosServiceClassTolGrantJitter = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 13), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassTolGrantJitter.setDescription("Template for docsIetfQosParamSetTolGrantJitter.")
docsIetfQosServiceClassGrantsPerInterval = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 127)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassGrantsPerInterval.setDescription("Template for docsIetfQosParamSetGrantsPerInterval.")
docsIetfQosServiceClassMaxLatency = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 15), Unsigned32().clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassMaxLatency.setDescription("Template for docsIetfQosParamSetMaxLatency.")
docsIetfQosServiceClassActiveTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(0)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassActiveTimeout.setDescription("Template for docsIetfQosParamSetActiveTimeout.")
docsIetfQosServiceClassAdmittedTimeout = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535)).clone(200)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassAdmittedTimeout.setDescription("Template for docsIetfQosParamSetAdmittedTimeout.")
docsIetfQosServiceClassSchedulingType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 18), DocsIetfQosSchedulingType().clone('bestEffort')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassSchedulingType.setDescription("Template for docsIetfQosParamSetSchedulingType.")
docsIetfQosServiceClassRequestPolicy = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4).clone(hexValue='00000000')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassRequestPolicy.setDescription("Template for docsIetfQosParamSetRequestPolicyOct.")
docsIetfQosServiceClassTosAndMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 20), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosAndMask.setDescription("Template for docsIetfQosParamSetTosAndMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
docsIetfQosServiceClassTosOrMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 21), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosServiceClassTosOrMask.setDescription("Template for docsIetfQosParamSetTosOrMask.\nThe IP TOS octet as originally defined in RFC 791\nhas been superseded by the 6-bit Differentiated\nServices Field (DSField, RFC 3260) and the 2-bit\nExplicit Congestion Notification Field (ECN field,\nRFC 3168). Network operators SHOULD avoid\nspecifying values of\ndocsIetfQosServiceClassTosAndMask and\ndocsIetfQosServiceClassTosOrMask that would result\nin the modification of the ECN bits.\n\nIn particular, operators should not use values of\ndocsIetfQosServiceClassTosAndMask that have either\nof the least-significant two bits set to 0.\nSimilarly, operators should not use values of\ndocsIetfQosServiceClassTosOrMask that have either\nof the least-significant two bits set to 1.")
docsIetfQosServiceClassDirection = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 22), DocsIetfQosRfMacIfDirection().clone('upstream')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDirection.setDescription("Specifies whether the service class template\napplies to upstream or downstream service flows.")
docsIetfQosServiceClassStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 23), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosServiceClassDSCPOverwrite = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 8, 1, 24), DscpOrAny().clone('-1')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassDSCPOverwrite.setDescription("This object allows the overwrite of the DSCP\nfield per RFC 3260.\n\nIf this object is -1, then the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'FF'H and docsIetfQosServiceClassTosOrMask MUST be\n'00'H. Otherwise, this object is in the range of\n0..63, and the corresponding entry's\ndocsIetfQosServiceClassTosAndMask value MUST be\n'03'H and the docsIetfQosServiceClassTosOrMask MUST\nbe this object's value shifted left by two bit\npositions.")
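
# --- Illustrative sketch (not part of the generated MIB code) ---
# The DSCP overwrite rule above maps a DSCP value (or -1 for "no
# overwrite") onto the TosAndMask/TosOrMask pair. Helper name is
# hypothetical.
def _example_dscp_to_masks(dscp):
    """Return (and_mask, or_mask) octet values for a DSCP in -1..63."""
    if dscp == -1:
        return 0xFF, 0x00              # 'FF'H / '00'H: TOS byte untouched
    return 0x03, (dscp & 0x3F) << 2    # '03'H preserves ECN; DSCP << 2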
docsIetfQosServiceClassPolicyTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 9))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyTable.setDescription("This table describes the set of DOCSIS-QOS\nService Class Policies.\n\nThis table is an adjunct to the\ndocsDevFilterPolicy table. Entries in the\ndocsDevFilterPolicy table can point to\nspecific rows in this table.\n\nThis table permits mapping a packet to a service\nclass name of an active service flow so long as\na classifier does not exist at a higher\npriority.")
docsIetfQosServiceClassPolicyEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 9, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyIndex"))
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyEntry.setDescription("A service class name policy entry.")
docsIetfQosServiceClassPolicyIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyIndex.setDescription("Index value to identify an entry in\nthis table uniquely.")
docsIetfQosServiceClassPolicyName = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyName.setDescription("Service Class Name to identify the name of the\nservice class flow to which the packet should be\ndirected.")
docsIetfQosServiceClassPolicyRulePriority = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyRulePriority.setDescription("Service Class Policy rule priority for the\nentry.")
docsIetfQosServiceClassPolicyStatus = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStatus.setDescription("Used to create or delete rows in this table.\nThis object should not be deleted if it is\nreferenced by an entry in docsDevFilterPolicy.\nThe reference should be deleted first.\nThere is no restriction on the ability\nto change values in this row while the row is\nactive. Inactive rows need not be timed out.")
docsIetfQosServiceClassPolicyStorageType = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 9, 1, 5), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: docsIetfQosServiceClassPolicyStorageType.setDescription("This object defines whether this row is kept in\nvolatile storage and lost upon reboot or whether\nit is backed up by non-volatile or permanent\nstorage. 'permanent' entries need not allow\nwritable access to any object.")
docsIetfQosPHSTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 10))
if mibBuilder.loadTexts: docsIetfQosPHSTable.setDescription("This table describes the set of payload header\nsuppression entries.")
docsIetfQosPHSEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 10, 1)).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowId"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosPktClassId"))
if mibBuilder.loadTexts: docsIetfQosPHSEntry.setDescription("A payload header suppression entry.\n\nThe ifIndex is an ifType of docsCableMaclayer(127).\nThe index docsIetfQosServiceFlowId selects one\nservice flow from the cable MAC layer interface.\nThe docsIetfQosPktClassId index matches an\nindex of the docsIetfQosPktClassTable.")
docsIetfQosPHSField = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSField.setDescription("Payload header suppression field defines the\nbytes of the header that must be\nsuppressed/restored by the sending/receiving\ndevice.\n\nThe number of octets in this object should be\nthe same as the value of docsIetfQosPHSSize.")
docsIetfQosPHSMask = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSMask.setDescription("Payload header suppression mask defines the\nbit mask that is used in combination with the\ndocsIetfQosPHSField. It defines which bytes in\nthe header must be suppressed/restored by the\nsending or receiving device.\n\nEach bit of this bit mask corresponds to a byte\nin the docsIetfQosPHSField, with the least\nsignificant bit corresponding to the first byte\nof the docsIetfQosPHSField.\n\nEach bit of the bit mask specifies whether\nthe corresponding byte should be suppressed\nin the packet. A bit value of '1' indicates that\nthe byte should be suppressed by the sending\ndevice and restored by the receiving device.\nA bit value of '0' indicates that\nthe byte should not be suppressed by the sending\ndevice or restored by the receiving device.\n\nIf the bit mask does not contain a bit for each\nbyte in the docsIetfQosPHSField, then the bit mask\nis extended with bit values of '1' to be the\nnecessary length.")
docsIetfQosPHSSize = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSSize.setDescription("Payload header suppression size specifies the\nnumber of bytes in the header to be suppressed\nand restored.\n\nThe value of this object must match the number\nof bytes in the docsIetfQosPHSField.")
docsIetfQosPHSVerify = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 4), TruthValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSVerify.setDescription("Payload header suppression verification value. If\n'true', the sender must verify docsIetfQosPHSField\nis the same as what is contained in the packet\nto be suppressed.")
docsIetfQosPHSIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 10, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosPHSIndex.setDescription("Payload header suppression index uniquely\nreferences the PHS rule for a given service flow.")
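
# --- Illustrative sketch (not part of the generated MIB code) ---
# Applying the PHS rule described above on the sending side: each mask
# bit selects whether the corresponding docsIetfQosPHSField byte is
# suppressed, least significant bit first, with the mask implicitly
# extended with 1s. This assumes LSB-first ordering within each
# successive mask octet, one plausible reading of the text above.
def _example_phs_suppress(header, mask, size):
    """Return header bytes with the masked-in first `size` bytes removed."""
    out = bytearray()
    for i, byte in enumerate(header):
        if i < size:
            mask_byte = mask[i // 8] if i // 8 < len(mask) else 0xFF
            if mask_byte & (1 << (i % 8)):
                continue  # suppressed by sender, restored by receiver
        out.append(byte)
    return bytes(out)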
docsIetfQosCmtsMacToSrvFlowTable = MibTable((1, 3, 6, 1, 2, 1, 127, 1, 11))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowTable.setDescription("This table provides for referencing the service\nflows associated with a particular cable modem.\nThis allows indexing into other docsIetfQos\ntables that are indexed by docsIetfQosServiceFlowId\nand ifIndex.")
docsIetfQosCmtsMacToSrvFlowEntry = MibTableRow((1, 3, 6, 1, 2, 1, 127, 1, 11, 1)).setIndexNames((0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsCmMac"), (0, "DOCS-IETF-QOS-MIB", "docsIetfQosCmtsServiceFlowId"))
if mibBuilder.loadTexts: docsIetfQosCmtsMacToSrvFlowEntry.setDescription("An entry is created by CMTS for each service flow\nconnected to this CMTS.")
docsIetfQosCmtsCmMac = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 1), MacAddress()).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsCmMac.setDescription("The MAC address for the referenced CM.")
docsIetfQosCmtsServiceFlowId = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("noaccess")
if mibBuilder.loadTexts: docsIetfQosCmtsServiceFlowId.setDescription("An index assigned to a service flow by CMTS.")
docsIetfQosCmtsIfIndex = MibTableColumn((1, 3, 6, 1, 2, 1, 127, 1, 11, 1, 3), InterfaceIndex()).setMaxAccess("readonly")
if mibBuilder.loadTexts: docsIetfQosCmtsIfIndex.setDescription("The ifIndex of ifType docsCableMacLayer(127)\non the CMTS that is connected to the Cable Modem.")
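
# --- Illustrative sketch (not part of the generated MIB code) ---
# The table above is indexed by the CM MAC address; as a fixed-length
# 6-octet string, the MAC contributes six sub-identifiers to the
# instance OID (per RFC 2578, section 7.7). A hypothetical helper for
# building that index suffix:
def _example_mac_index(mac):
    """'00:11:22:33:44:55' -> (0, 17, 34, 51, 68, 85) OID suffix."""
    return tuple(int(part, 16) for part in mac.split(':'))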
docsIetfQosConformance = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2))
docsIetfQosGroups = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 1))
docsIetfQosCompliances = MibIdentifier((1, 3, 6, 1, 2, 1, 127, 2, 2))
# Augmentations
# Groups
docsIetfQosBaseGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassStateActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetDestMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortStart"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSVerify"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSARsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassEnetProtocolType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosLow"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetSourceAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSField"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChangeFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSDReqs"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestPortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAdds"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassVlanId"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeleteFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicDeletes"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpProtocol"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowSID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPHSUnknowns"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPHSSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourcePortEnd"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSAAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassUserPriHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDSCRsps"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassIpTosHigh"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassSourceMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassDestMacAddr"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicAddFails"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDCCAcks"), ("DOCS-IETF-QOS-MIB", "docsIetfQosPktClassInetAddressType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosDynamicChanges"), ) )
if mibBuilder.loadTexts: docsIetfQosBaseGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems.")
docsIetfQosParamSetGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 2)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetRequestPolicyOct"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetBitMap"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetMaxLatency"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetUnsolicitGrantSize"), ) )
if mibBuilder.loadTexts: docsIetfQosParamSetGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems for QOS Parameter Sets.")
docsIetfQosCmtsGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 3)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogSFID"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragDiscards"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDropPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogControl"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeCreated"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogOctets"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamConcatBursts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogCmMac"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPrimary"), ("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosUpstreamFragments"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeActive"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogIfIndex"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogPolicedDelayPkts"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogServiceClassName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceFlowLogTimeDeleted"), ) )
if mibBuilder.loadTexts: docsIetfQosCmtsGroup.setDescription("Group of objects implemented only in the CMTS.")
docsIetfQosSrvClassPolicyGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 4)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyName"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyRulePriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPolicyStatus"), ) )
if mibBuilder.loadTexts: docsIetfQosSrvClassPolicyGroup.setDescription("Group of objects implemented in both Cable Modems and\nCable Modem Termination Systems when supporting policy-based\nservice flows.")
docsIetfQosServiceClassGroup = ObjectGroup((1, 3, 6, 1, 2, 1, 127, 2, 1, 5)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassSchedulingType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomGrantInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolGrantJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDSCPOverwrite"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGrantsPerInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassDirection"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassPriority"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxTrafficRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStorageType"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTolPollJitter"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosOrMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassStatus"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxConcatBurst"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassTosAndMask"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassUnsolicitGrantSize"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassNomPollInterval"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassRequestPolicy"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedRate"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassActiveTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMinReservedPkt"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassAdmittedTimeout"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassMaxLatency"), ) )
if mibBuilder.loadTexts: docsIetfQosServiceClassGroup.setDescription("Group of objects implemented only in Cable Modem\nTermination Systems when supporting expansion of Service\nClass Names in a QOS Parameter Set")
# Compliances
docsIetfQosCompliance = ModuleCompliance((1, 3, 6, 1, 2, 1, 127, 2, 2, 1)).setObjects(*(("DOCS-IETF-QOS-MIB", "docsIetfQosCmtsGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosServiceClassGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosSrvClassPolicyGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosBaseGroup"), ("DOCS-IETF-QOS-MIB", "docsIetfQosParamSetGroup"), ) )
if mibBuilder.loadTexts: docsIetfQosCompliance.setDescription("The compliance statement for MCNS Cable Modems and\nCable Modem Termination Systems that implement DOCSIS\nService Flows.")
# Exports
# Module identity
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", PYSNMP_MODULE_ID=docsIetfQosMIB)
# Types
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", DocsIetfQosBitRate=DocsIetfQosBitRate, DocsIetfQosRfMacIfDirection=DocsIetfQosRfMacIfDirection, DocsIetfQosSchedulingType=DocsIetfQosSchedulingType)
# Objects
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosMIB=docsIetfQosMIB, docsIetfQosNotifications=docsIetfQosNotifications, docsIetfQosMIBObjects=docsIetfQosMIBObjects, docsIetfQosPktClassTable=docsIetfQosPktClassTable, docsIetfQosPktClassEntry=docsIetfQosPktClassEntry, docsIetfQosPktClassId=docsIetfQosPktClassId, docsIetfQosPktClassDirection=docsIetfQosPktClassDirection, docsIetfQosPktClassPriority=docsIetfQosPktClassPriority, docsIetfQosPktClassIpTosLow=docsIetfQosPktClassIpTosLow, docsIetfQosPktClassIpTosHigh=docsIetfQosPktClassIpTosHigh, docsIetfQosPktClassIpTosMask=docsIetfQosPktClassIpTosMask, docsIetfQosPktClassIpProtocol=docsIetfQosPktClassIpProtocol, docsIetfQosPktClassInetAddressType=docsIetfQosPktClassInetAddressType, docsIetfQosPktClassInetSourceAddr=docsIetfQosPktClassInetSourceAddr, docsIetfQosPktClassInetSourceMask=docsIetfQosPktClassInetSourceMask, docsIetfQosPktClassInetDestAddr=docsIetfQosPktClassInetDestAddr, docsIetfQosPktClassInetDestMask=docsIetfQosPktClassInetDestMask, docsIetfQosPktClassSourcePortStart=docsIetfQosPktClassSourcePortStart, docsIetfQosPktClassSourcePortEnd=docsIetfQosPktClassSourcePortEnd, docsIetfQosPktClassDestPortStart=docsIetfQosPktClassDestPortStart, docsIetfQosPktClassDestPortEnd=docsIetfQosPktClassDestPortEnd, docsIetfQosPktClassDestMacAddr=docsIetfQosPktClassDestMacAddr, docsIetfQosPktClassDestMacMask=docsIetfQosPktClassDestMacMask, docsIetfQosPktClassSourceMacAddr=docsIetfQosPktClassSourceMacAddr, docsIetfQosPktClassEnetProtocolType=docsIetfQosPktClassEnetProtocolType, docsIetfQosPktClassEnetProtocol=docsIetfQosPktClassEnetProtocol, docsIetfQosPktClassUserPriLow=docsIetfQosPktClassUserPriLow, docsIetfQosPktClassUserPriHigh=docsIetfQosPktClassUserPriHigh, docsIetfQosPktClassVlanId=docsIetfQosPktClassVlanId, docsIetfQosPktClassStateActive=docsIetfQosPktClassStateActive, docsIetfQosPktClassPkts=docsIetfQosPktClassPkts, docsIetfQosPktClassBitMap=docsIetfQosPktClassBitMap, docsIetfQosParamSetTable=docsIetfQosParamSetTable, docsIetfQosParamSetEntry=docsIetfQosParamSetEntry, docsIetfQosParamSetServiceClassName=docsIetfQosParamSetServiceClassName, docsIetfQosParamSetPriority=docsIetfQosParamSetPriority, docsIetfQosParamSetMaxTrafficRate=docsIetfQosParamSetMaxTrafficRate, docsIetfQosParamSetMaxTrafficBurst=docsIetfQosParamSetMaxTrafficBurst, docsIetfQosParamSetMinReservedRate=docsIetfQosParamSetMinReservedRate, docsIetfQosParamSetMinReservedPkt=docsIetfQosParamSetMinReservedPkt, docsIetfQosParamSetActiveTimeout=docsIetfQosParamSetActiveTimeout, docsIetfQosParamSetAdmittedTimeout=docsIetfQosParamSetAdmittedTimeout, docsIetfQosParamSetMaxConcatBurst=docsIetfQosParamSetMaxConcatBurst, docsIetfQosParamSetSchedulingType=docsIetfQosParamSetSchedulingType, docsIetfQosParamSetNomPollInterval=docsIetfQosParamSetNomPollInterval, docsIetfQosParamSetTolPollJitter=docsIetfQosParamSetTolPollJitter, docsIetfQosParamSetUnsolicitGrantSize=docsIetfQosParamSetUnsolicitGrantSize, docsIetfQosParamSetNomGrantInterval=docsIetfQosParamSetNomGrantInterval, docsIetfQosParamSetTolGrantJitter=docsIetfQosParamSetTolGrantJitter, docsIetfQosParamSetGrantsPerInterval=docsIetfQosParamSetGrantsPerInterval, docsIetfQosParamSetTosAndMask=docsIetfQosParamSetTosAndMask, docsIetfQosParamSetTosOrMask=docsIetfQosParamSetTosOrMask, docsIetfQosParamSetMaxLatency=docsIetfQosParamSetMaxLatency, docsIetfQosParamSetType=docsIetfQosParamSetType, docsIetfQosParamSetRequestPolicyOct=docsIetfQosParamSetRequestPolicyOct, docsIetfQosParamSetBitMap=docsIetfQosParamSetBitMap, 
docsIetfQosServiceFlowTable=docsIetfQosServiceFlowTable, docsIetfQosServiceFlowEntry=docsIetfQosServiceFlowEntry, docsIetfQosServiceFlowId=docsIetfQosServiceFlowId, docsIetfQosServiceFlowSID=docsIetfQosServiceFlowSID, docsIetfQosServiceFlowDirection=docsIetfQosServiceFlowDirection, docsIetfQosServiceFlowPrimary=docsIetfQosServiceFlowPrimary, docsIetfQosServiceFlowStatsTable=docsIetfQosServiceFlowStatsTable, docsIetfQosServiceFlowStatsEntry=docsIetfQosServiceFlowStatsEntry, docsIetfQosServiceFlowPkts=docsIetfQosServiceFlowPkts, docsIetfQosServiceFlowOctets=docsIetfQosServiceFlowOctets, docsIetfQosServiceFlowTimeCreated=docsIetfQosServiceFlowTimeCreated, docsIetfQosServiceFlowTimeActive=docsIetfQosServiceFlowTimeActive, docsIetfQosServiceFlowPHSUnknowns=docsIetfQosServiceFlowPHSUnknowns, docsIetfQosServiceFlowPolicedDropPkts=docsIetfQosServiceFlowPolicedDropPkts, docsIetfQosServiceFlowPolicedDelayPkts=docsIetfQosServiceFlowPolicedDelayPkts, docsIetfQosUpstreamStatsTable=docsIetfQosUpstreamStatsTable, docsIetfQosUpstreamStatsEntry=docsIetfQosUpstreamStatsEntry, docsIetfQosSID=docsIetfQosSID, docsIetfQosUpstreamFragments=docsIetfQosUpstreamFragments, docsIetfQosUpstreamFragDiscards=docsIetfQosUpstreamFragDiscards, docsIetfQosUpstreamConcatBursts=docsIetfQosUpstreamConcatBursts, docsIetfQosDynamicServiceStatsTable=docsIetfQosDynamicServiceStatsTable, docsIetfQosDynamicServiceStatsEntry=docsIetfQosDynamicServiceStatsEntry, docsIetfQosIfDirection=docsIetfQosIfDirection, docsIetfQosDSAReqs=docsIetfQosDSAReqs, docsIetfQosDSARsps=docsIetfQosDSARsps, docsIetfQosDSAAcks=docsIetfQosDSAAcks, docsIetfQosDSCReqs=docsIetfQosDSCReqs, docsIetfQosDSCRsps=docsIetfQosDSCRsps, docsIetfQosDSCAcks=docsIetfQosDSCAcks, docsIetfQosDSDReqs=docsIetfQosDSDReqs, docsIetfQosDSDRsps=docsIetfQosDSDRsps, docsIetfQosDynamicAdds=docsIetfQosDynamicAdds, docsIetfQosDynamicAddFails=docsIetfQosDynamicAddFails, docsIetfQosDynamicChanges=docsIetfQosDynamicChanges, docsIetfQosDynamicChangeFails=docsIetfQosDynamicChangeFails, docsIetfQosDynamicDeletes=docsIetfQosDynamicDeletes, docsIetfQosDynamicDeleteFails=docsIetfQosDynamicDeleteFails, docsIetfQosDCCReqs=docsIetfQosDCCReqs, docsIetfQosDCCRsps=docsIetfQosDCCRsps, docsIetfQosDCCAcks=docsIetfQosDCCAcks, docsIetfQosDCCs=docsIetfQosDCCs, docsIetfQosDCCFails=docsIetfQosDCCFails, docsIetfQosServiceFlowLogTable=docsIetfQosServiceFlowLogTable, docsIetfQosServiceFlowLogEntry=docsIetfQosServiceFlowLogEntry, docsIetfQosServiceFlowLogIndex=docsIetfQosServiceFlowLogIndex, docsIetfQosServiceFlowLogIfIndex=docsIetfQosServiceFlowLogIfIndex, docsIetfQosServiceFlowLogSFID=docsIetfQosServiceFlowLogSFID, docsIetfQosServiceFlowLogCmMac=docsIetfQosServiceFlowLogCmMac, docsIetfQosServiceFlowLogPkts=docsIetfQosServiceFlowLogPkts, docsIetfQosServiceFlowLogOctets=docsIetfQosServiceFlowLogOctets, docsIetfQosServiceFlowLogTimeDeleted=docsIetfQosServiceFlowLogTimeDeleted, docsIetfQosServiceFlowLogTimeCreated=docsIetfQosServiceFlowLogTimeCreated, docsIetfQosServiceFlowLogTimeActive=docsIetfQosServiceFlowLogTimeActive, docsIetfQosServiceFlowLogDirection=docsIetfQosServiceFlowLogDirection, docsIetfQosServiceFlowLogPrimary=docsIetfQosServiceFlowLogPrimary, docsIetfQosServiceFlowLogServiceClassName=docsIetfQosServiceFlowLogServiceClassName, docsIetfQosServiceFlowLogPolicedDropPkts=docsIetfQosServiceFlowLogPolicedDropPkts, docsIetfQosServiceFlowLogPolicedDelayPkts=docsIetfQosServiceFlowLogPolicedDelayPkts, docsIetfQosServiceFlowLogControl=docsIetfQosServiceFlowLogControl, 
docsIetfQosServiceClassTable=docsIetfQosServiceClassTable, docsIetfQosServiceClassEntry=docsIetfQosServiceClassEntry, docsIetfQosServiceClassName=docsIetfQosServiceClassName, docsIetfQosServiceClassStatus=docsIetfQosServiceClassStatus, docsIetfQosServiceClassPriority=docsIetfQosServiceClassPriority, docsIetfQosServiceClassMaxTrafficRate=docsIetfQosServiceClassMaxTrafficRate, docsIetfQosServiceClassMaxTrafficBurst=docsIetfQosServiceClassMaxTrafficBurst, docsIetfQosServiceClassMinReservedRate=docsIetfQosServiceClassMinReservedRate, docsIetfQosServiceClassMinReservedPkt=docsIetfQosServiceClassMinReservedPkt, docsIetfQosServiceClassMaxConcatBurst=docsIetfQosServiceClassMaxConcatBurst)
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosServiceClassNomPollInterval=docsIetfQosServiceClassNomPollInterval, docsIetfQosServiceClassTolPollJitter=docsIetfQosServiceClassTolPollJitter, docsIetfQosServiceClassUnsolicitGrantSize=docsIetfQosServiceClassUnsolicitGrantSize, docsIetfQosServiceClassNomGrantInterval=docsIetfQosServiceClassNomGrantInterval, docsIetfQosServiceClassTolGrantJitter=docsIetfQosServiceClassTolGrantJitter, docsIetfQosServiceClassGrantsPerInterval=docsIetfQosServiceClassGrantsPerInterval, docsIetfQosServiceClassMaxLatency=docsIetfQosServiceClassMaxLatency, docsIetfQosServiceClassActiveTimeout=docsIetfQosServiceClassActiveTimeout, docsIetfQosServiceClassAdmittedTimeout=docsIetfQosServiceClassAdmittedTimeout, docsIetfQosServiceClassSchedulingType=docsIetfQosServiceClassSchedulingType, docsIetfQosServiceClassRequestPolicy=docsIetfQosServiceClassRequestPolicy, docsIetfQosServiceClassTosAndMask=docsIetfQosServiceClassTosAndMask, docsIetfQosServiceClassTosOrMask=docsIetfQosServiceClassTosOrMask, docsIetfQosServiceClassDirection=docsIetfQosServiceClassDirection, docsIetfQosServiceClassStorageType=docsIetfQosServiceClassStorageType, docsIetfQosServiceClassDSCPOverwrite=docsIetfQosServiceClassDSCPOverwrite, docsIetfQosServiceClassPolicyTable=docsIetfQosServiceClassPolicyTable, docsIetfQosServiceClassPolicyEntry=docsIetfQosServiceClassPolicyEntry, docsIetfQosServiceClassPolicyIndex=docsIetfQosServiceClassPolicyIndex, docsIetfQosServiceClassPolicyName=docsIetfQosServiceClassPolicyName, docsIetfQosServiceClassPolicyRulePriority=docsIetfQosServiceClassPolicyRulePriority, docsIetfQosServiceClassPolicyStatus=docsIetfQosServiceClassPolicyStatus, docsIetfQosServiceClassPolicyStorageType=docsIetfQosServiceClassPolicyStorageType, docsIetfQosPHSTable=docsIetfQosPHSTable, docsIetfQosPHSEntry=docsIetfQosPHSEntry, docsIetfQosPHSField=docsIetfQosPHSField, docsIetfQosPHSMask=docsIetfQosPHSMask, docsIetfQosPHSSize=docsIetfQosPHSSize, docsIetfQosPHSVerify=docsIetfQosPHSVerify, docsIetfQosPHSIndex=docsIetfQosPHSIndex, docsIetfQosCmtsMacToSrvFlowTable=docsIetfQosCmtsMacToSrvFlowTable, docsIetfQosCmtsMacToSrvFlowEntry=docsIetfQosCmtsMacToSrvFlowEntry, docsIetfQosCmtsCmMac=docsIetfQosCmtsCmMac, docsIetfQosCmtsServiceFlowId=docsIetfQosCmtsServiceFlowId, docsIetfQosCmtsIfIndex=docsIetfQosCmtsIfIndex, docsIetfQosConformance=docsIetfQosConformance, docsIetfQosGroups=docsIetfQosGroups, docsIetfQosCompliances=docsIetfQosCompliances)
# Groups
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosBaseGroup=docsIetfQosBaseGroup, docsIetfQosParamSetGroup=docsIetfQosParamSetGroup, docsIetfQosCmtsGroup=docsIetfQosCmtsGroup, docsIetfQosSrvClassPolicyGroup=docsIetfQosSrvClassPolicyGroup, docsIetfQosServiceClassGroup=docsIetfQosServiceClassGroup)
# Compliances
mibBuilder.exportSymbols("DOCS-IETF-QOS-MIB", docsIetfQosCompliance=docsIetfQosCompliance)
# -*- python -*-
# ex: set syntax=python:
# vim: set syntax=python:
import os
import re
from collections import defaultdict, namedtuple
from enum import Enum
from pathlib import Path
import buildbot.www.authz.endpointmatchers as ems
from buildbot.changes.filter import ChangeFilter
from buildbot.changes.gitpoller import GitPoller
from buildbot.config import BuilderConfig
from buildbot.locks import WorkerLock
from buildbot.process.factory import BuildFactory
from buildbot.process.properties import Interpolate, Property, renderer, Transform
from buildbot.reporters.generators.build import BuildStartEndStatusGenerator
from buildbot.reporters.github import GitHubStatusPush
from buildbot.reporters.message import MessageFormatterRenderable
from buildbot.schedulers.basic import AnyBranchScheduler
from buildbot.schedulers.forcesched import ForceScheduler
from buildbot.schedulers.timed import Nightly
from buildbot.steps.cmake import CMake
from buildbot.steps.master import SetProperties
from buildbot.steps.shell import SetPropertyFromCommand, ShellCommand
from buildbot.steps.source.git import Git
from buildbot.steps.source.github import GitHub
from buildbot.steps.transfer import FileUpload, FileDownload
from buildbot.steps.worker import MakeDirectory, SetPropertiesFromEnv, RemoveDirectory
from buildbot.worker import Worker
from buildbot.www.auth import UserPasswordAuth
from buildbot.www.authz import Authz
from buildbot.www.authz.roles import RolesFromUsername
from buildbot.www.hooks.github import GitHubEventHandler
from twisted.internet import defer
from custom_steps import CTest, CleanOldFiles, SetPropertiesFromCMakeCache
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
# SECRETS
GITHUB_TOKEN = Path('github_token.txt').read_text().strip()
WORKER_SECRET = Path('halide_bb_pass.txt').read_text().strip()
WEBHOOK_SECRET = Path('webhook_token.txt').read_text().strip()
WWW_PASSWORD = Path('buildbot_www_pass.txt').read_text().strip()
# SERVER SETTINGS
ARTIFACTS_DIR = os.environ.get('HALIDE_BB_MASTER_ARTIFACTS_DIR', '/home/halidenightly/artifacts')
REPO_DIR = Path(__file__, '..', '..').resolve()
# LLVM
# At any given time, we test (at least) 3 LLVM versions:
# - the current main (changes daily)
# - the most recent release (expected to be stable)
# - an older release (expected to be stable)
#
# The branches that correspond to these will rotate as new versions
# are released, but the underlying test logic should not need changing.
Version = namedtuple('Version', ['major', 'minor', 'patch'])
VersionedBranch = namedtuple('VersionedBranch', ['ref', 'version'])
LLVM_MAIN = 'main'
LLVM_RELEASE_17 = 'release_17'
LLVM_RELEASE_16 = 'release_16'
LLVM_RELEASE_15 = 'release_15'
LLVM_BRANCHES = {LLVM_MAIN: VersionedBranch(ref='main', version=Version(18, 0, 0)),
LLVM_RELEASE_17: VersionedBranch(ref='release/17.x', version=Version(17, 0, 0)),
LLVM_RELEASE_16: VersionedBranch(ref='llvmorg-16.0.6', version=Version(16, 0, 6)),
LLVM_RELEASE_15: VersionedBranch(ref='llvmorg-15.0.7', version=Version(15, 0, 7))}
# At any given time, Halide has a main branch, which supports (at least)
# the LLVM main branch and the most recent release branch (and maybe one older).
#
# We also support previous release branches; a release branch tracks *only* the
# corresponding version of LLVM (i.e., Halide 13 is 'release/13.x' and is only
# built/tested against LLVM13, even though it might still work with other LLVM versions).
#
# Note that we deliberately chose branch names that match LLVM's conventions.
#
# (Note that there are older releases of Halide that we no longer bother to build/test regularly.)
HALIDE_MAIN = 'main'
HALIDE_RELEASE_16 = 'release_16'
HALIDE_RELEASE_15 = 'release_15'
_HALIDE_RELEASES = [
HALIDE_RELEASE_16,
HALIDE_RELEASE_15,
]
HALIDE_BRANCHES = {HALIDE_MAIN: VersionedBranch(ref='main', version=Version(17, 0, 0)),
HALIDE_RELEASE_16: VersionedBranch(ref='release/16.x', version=Version(16, 0, 6)),
HALIDE_RELEASE_15: VersionedBranch(ref='release/15.x', version=Version(15, 0, 1))}
# This lists the Halide branch(es) for which we want to build nightlies;
# it's usually desirable to constrain these to save buildbot time (esp on the slower bots)
# and avoid branches that aren't changing much (i.e. -- recent releases that aren't
# likely to need new updates soon).
HALIDE_NIGHTLIES = [HALIDE_MAIN]
# Given a halide branch, return the 'native' llvm version we expect to use with it.
# For halide release branches, this is the corresponding llvm release branch; for
# halide main, it's llvm main.
LLVM_FOR_HALIDE = {
HALIDE_MAIN: [LLVM_MAIN, LLVM_RELEASE_17, LLVM_RELEASE_16],
HALIDE_RELEASE_16: [LLVM_RELEASE_16],
HALIDE_RELEASE_15: [LLVM_RELEASE_15],
}
# WORKERS
# Can use Python 3.7 dataclasses instead, if we choose to upgrade to that.
WorkerConfig = namedtuple('WorkerConfig', ['max_builds', 'j', 'arch', 'bits', 'os'])
# Using nproc+2 on the arm32 builds causes internal errors in gcc-armeabihf. Let's just use nproc.
_NPROC = Interpolate("%(worker:numcpus)s")
# For machines with max_builds=1, using nproc+2 cores for building is the conventional choice
# (and what ninja defaults to). Oddly, "ninja -j 0" means "use as many threads as you like" which
# is definitely not what we want.
_NPROC_PLUS_2 = Transform(lambda x: f'{int(x) + 2}', _NPROC)
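# Illustrative sketch (not part of the config): on a hypothetical 8-core worker,
# Interpolate("%(worker:numcpus)s") renders to "8", so _NPROC renders to "8"
# and _NPROC_PLUS_2 renders to "10" when the step command is built.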
_WORKERS = [
('linux-worker-1', WorkerConfig(max_builds=4, j=8, arch='x86', bits=[32, 64], os='linux')),
('linux-worker-4', WorkerConfig(max_builds=4, j=8, arch='x86', bits=[32, 64], os='linux')),
# 2013 Mac Pro running a 6-core Xeon.
('mac-x86-worker-1', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
# Mac Mini 2018, 3.2 GHz 6-Core Intel Core i7, 16GB memory
('mac-x86-worker-2', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
# Mac Mini 2018, ??? details TBD
('mac-x86-worker-3', WorkerConfig(max_builds=2, j=8, arch='x86', bits=[64], os='osx')),
('mac-arm-worker-1', WorkerConfig(max_builds=2, j=8, arch='arm', bits=[64], os='osx')),
# The arm-linux bots here have 4 cores but apparently don't have enough RAM to do more
# than -j=2 without crashing during LLVM builds.
('arm32-linux-worker-1', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[32], os='linux')),
('arm32-linux-worker-2', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[32], os='linux')),
('arm64-linux-worker-1', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[64], os='linux')),
('arm64-linux-worker-2', WorkerConfig(max_builds=1, j=2, arch='arm', bits=[64], os='linux')),
# The rpi4 has 8GB ram, so apparently it's OK with -j=nproc for now.
('rpi4-linux-worker-1', WorkerConfig(max_builds=1, j=_NPROC, arch='arm', bits=[32], os='linux')),
# TODO: should normally be offline because every D3D12 test fails
('win-worker-2', WorkerConfig(max_builds=1, j=_NPROC_PLUS_2, arch='x86', bits=[32, 64], os='windows')),
# TODO: broken, pending repair till Monday
# ('win-worker-3', WorkerConfig(max_builds=2, j=_NPROC_PLUS_2, arch='x86', bits=[32, 64], os='windows')),
]
# The 'workers' list defines the set of recognized buildworkers. Each element is
# a Worker object, specifying a unique worker name and password. The same
# worker name and password must be configured on the worker.
c['workers'] = [Worker(n,
WORKER_SECRET,
keepalive_interval=300, # default is 3600 (1 hour). We'll do 5 mins.
max_builds=cfg.max_builds,
properties={'WORKER_BUILD_PARALLELISM': cfg.j}) for n, cfg in _WORKERS]
_SANITIZERS = [
'asan',
'fuzzer', # this isn't *technically* a sanitizer, but is close enough that it's a good fit
]
# LOCKS
# Performance testing requires exclusive use of a worker
# Compute-intensive build steps will grab this lock in reader
# mode. The performance test will grab it in exclusive mode.
performance_lock = WorkerLock("performance_lock", maxCount=9999)
# When building the LLVM nightlies, we can sync & build LLVM independently
# from other work, but when we update the install directory, we need to ensure
# we have an exclusive lock across the entire worker. (Since we have a small
# number of LLVM versions, and since 'make install' doesn't take very long,
# we could probably just get by with a single lock for *any* llvm install,
# but this isn't much harder to do.)
llvm_build_locks = {}
for llvm_branch, info in LLVM_BRANCHES.items():
for bits in [32, 64]:
llvm_build_locks[llvm_branch + str(bits)] = WorkerLock(
f'llvm_install_lock_{info.version.major}_{bits}', maxCount=9999)
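# Illustrative sketch (not part of the config): with the branch table above, the
# loop yields locks such as llvm_build_locks['main64'] named
# 'llvm_install_lock_18_64' and llvm_build_locks['release_1532'] named
# 'llvm_install_lock_15_32'.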
# CHANGESOURCES
# Here we point the buildbot at third-party codebases, i.e., dependencies.
# Currently, we only have LLVM's `main` branch configured.
c['change_source'] = [
GitPoller(
repourl='https://github.com/llvm/llvm-project.git',
workdir='gitpoller-llvm-workdir',
branch=LLVM_BRANCHES[LLVM_MAIN].ref,
pollInterval=60 * 60 * 24, # Only check llvm once every 24 hours
pollAtLaunch=True)
]
# CODEBASES
all_repositories = {
'https://github.com/halide/Halide.git': 'halide',
'https://github.com/llvm/llvm-project.git': 'llvm',
}
def codebase_generator(chdict):
repo = chdict['repository']
assert repo in all_repositories, "Codebase not found for chdict: " + str(chdict)
return all_repositories[repo]
c['codebaseGenerator'] = codebase_generator
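# Illustrative sketch (not part of the config): a change dict whose 'repository'
# is 'https://github.com/llvm/llvm-project.git' is assigned the codebase 'llvm'.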
# BUILDERS
# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
# what steps, and which workers can execute them. Note that any particular build will
# only take place on one worker.
class Purpose(Enum):
halide_nightly = 1
halide_testbranch = 2
llvm_nightly = 3
class BuildSystem(Enum):
make = 0
cmake = 1
class BuilderType:
"""A class to encapsulate the settings for a specific Builder.
(Do not confuse with CMake's 'BUILD_TYPE', which is something else.)
It includes:
- Halide 'target' in the form of arch-bits-os
- LLVM branch to be used
- CMake vs Make
- halide-nightly vs halide-testbranch vs llvm-nightly
- sanitizers vs none
It doesn't currently include any 'features' because we don't currently
bake any in at build time.
It doesn't currently include the C++ compiler used (eg gcc7 vs gcc8 vs clang),
mainly because we currently never test with multiple compilers for a given
setup. (If we ever need to do so, compiler should be added to this.)
"""
def __init__(self, arch, bits, os, halide_branch, llvm_branch, purpose, sanitizer=None,
buildsystem=BuildSystem.cmake):
assert arch in ['arm', 'x86']
assert bits in [32, 64]
assert os in ['linux', 'windows', 'osx']
assert llvm_branch in LLVM_BRANCHES, f'{llvm_branch} not recognized'
self.arch = arch
self.bits = bits
self.os = os
self.halide_branch = halide_branch
self.llvm_branch = llvm_branch
self.buildsystem = buildsystem
self.purpose = purpose
self.sanitizer = sanitizer
if self.halide_branch:
assert self.purpose != Purpose.llvm_nightly
assert self.halide_branch in HALIDE_BRANCHES, f'unknown branch {self.halide_branch}'
assert (self.purpose == Purpose.halide_testbranch or # if not testbranch...
self.llvm_branch in LLVM_FOR_HALIDE[self.halide_branch])
else:
assert self.purpose == Purpose.llvm_nightly
if self.sanitizer:
assert self.sanitizer in _SANITIZERS
# The armbots aren't configured with Python at all.
# We don't support the Python bindings on 32-bit at all.
def handles_python(self):
if self.bits == 32:
return False
if self.arch == 'arm' and self.os == 'linux':
return False
if self.sanitizer_preset() is not None:
return False
return True
def handles_sanitizers(self):
if self.buildsystem != BuildSystem.cmake:
return False
return (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux'
and self.llvm_branch == LLVM_MAIN)
def sanitizer_preset(self):
if self.handles_sanitizers():
if self.sanitizer == 'asan':
return 'linux-x64-asan'
if self.sanitizer == 'fuzzer':
return 'linux-x64-fuzzer'
return None
def handles_riscv(self):
# Only support RISCV on LLVM16 or later.
return self.llvm_branch not in [LLVM_RELEASE_15]
def handles_hexagon(self):
return (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux'
and self.llvm_branch == LLVM_MAIN)
def handles_wasm(self):
is_linux_x64 = (self.arch == 'x86'
and self.bits == 64
and self.os == 'linux')
return (self.llvm_branch == LLVM_MAIN
and (is_linux_x64 or self.os == 'osx'))
def handles_wasm_wabt(self):
return self.handles_wasm()
def handles_wasm_v8(self):
# OSX machines don't have V8 installed
return self.handles_wasm() and self.os == 'linux'
def has_nvidia(self):
return (self.arch == 'x86'
and self.bits == 64
and self.os in ['windows', 'linux'])
def handles_vulkan(self):
# TODO: disabled temporarily pending fixes to the Vulkan runtime
return False
# Stick with Linux on x86-64 for now. Others TBD.
# return (self.arch == 'x86'
# and self.bits == 64
# and self.os == 'linux'
# and self.halide_branch in [HALIDE_MAIN, HALIDE_RELEASE_16])
def handles_webgpu(self):
# At the moment, the WebGPU team recommends the OSX versions of Dawn/Node
# as the most robust for testing, so that's all we're set up to test with.
# (Note that 'Dawn' must be built/installed on the test machines manually;
# there are no binaries/prebuilts available at this time.)
return self.os == 'osx' and self.halide_branch not in [HALIDE_RELEASE_15]
def has_tflite(self):
if self.arch == 'x86' and self.bits == 64 and self.os == 'linux':
return True
if self.arch == 'arm' and self.bits == 64 and self.os == 'osx':
return True
return False
def has_ccache(self):
return self.os in ['osx', 'linux']
def halide_target(self):
return '%s-%d-%s' % (self.arch, self.bits, self.os)
def llvm_builder_label(self):
return 'llvm-%s-%s' % (LLVM_BRANCHES[self.llvm_branch].version.major, self.halide_target())
def halide_builder_label(self):
# This currently tries to (somewhat) mimic the existing label pattern,
# but is arbitrary. (If changed, manual purging of buildbot temporaries
# is appropriate)
a = ['halide']
if self.sanitizer:
a.append(self.sanitizer)
if self.purpose == Purpose.halide_testbranch:
a.append('testbranch')
elif self.purpose == Purpose.halide_nightly:
a.append('nightly')
a.append(self.halide_branch)
if self.halide_branch == HALIDE_MAIN:
# Halide main is built against multiple LLVM versions,
# so append that here for clarity
a.append(f'llvm{LLVM_BRANCHES[self.llvm_branch].version.major}')
a.append(self.halide_target())
a.append(self.buildsystem.name)
return '-'.join(a)
def builder_label(self):
if self.purpose == Purpose.llvm_nightly:
return self.llvm_builder_label()
else:
return self.halide_builder_label()
def builder_tags(self):
return self.builder_label().split('-')
def get_worker_names(self):
return [n for n, cfg in _WORKERS
if self.arch == cfg.arch and self.bits in cfg.bits and self.os == cfg.os]
def __str__(self):
return self.halide_target()
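# Illustrative sketch (not part of the config): with the branch tables above,
#   BuilderType('x86', 64, 'linux', HALIDE_MAIN, LLVM_MAIN, Purpose.halide_nightly)
# has builder_label() == 'halide-nightly-main-llvm18-x86-64-linux-cmake', and
# builder_tags() is that label split on '-'.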
def get_builddir_subpath(subpath):
# Normalize paths to use forward slashes.
return Transform(lambda x: x.replace('\\', '/'), Interpolate(f'%(prop:builddir)s/{subpath}'))
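# Illustrative sketch (not part of the config): with a hypothetical Windows
# builddir property of 'C:\bb\halide-nightly', get_builddir_subpath('halide-build')
# renders to 'C:/bb/halide-nightly/halide-build'.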
def get_llvm_toolchains_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-toolchains', *subpaths))
# TODO: make private to the LLVM code
def get_llvm_source_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-project', *subpaths))
# TODO: make private to the LLVM code
def get_llvm_build_path(*subpaths):
return get_builddir_subpath(os.path.join('llvm-build', *subpaths))
def get_llvm_install_path(builder_type, *subpaths):
# Note that `builder_type.purpose` can be a Halide builder or an LLVM builder;
# we want to ignore that aspect and produce the same effective path
# regardless (ie, based only on the other aspects of the builder_type).
llvm_workdir = builder_type.llvm_builder_label()
return get_builddir_subpath(os.path.join('..', llvm_workdir, 'llvm-install', *subpaths))
def get_halide_source_path(*subpaths):
return get_builddir_subpath(os.path.join('halide-source', *subpaths))
def get_halide_build_path(*subpaths):
return get_builddir_subpath(os.path.join('halide-build', *subpaths))
def get_halide_install_path(builder_type, *subpaths):
s = 'halide-install'
if builder_type.sanitizer:
s += '-' + builder_type.sanitizer
return get_builddir_subpath(os.path.join(s, *subpaths))
def add_get_halide_source_steps(factory, builder_type):
factory.addStep(GitHub(name='Get Halide source',
locks=[performance_lock.access('counting')],
codebase='halide',
workdir=get_halide_source_path(),
repourl='https://github.com/halide/Halide.git',
branch=HALIDE_BRANCHES[builder_type.halide_branch].ref,
mode='incremental'))
def add_get_llvm_source_steps(factory, builder_type):
factory.addStep(Git(name=f'Get LLVM {LLVM_BRANCHES[builder_type.llvm_branch].version.major}',
locks=[performance_lock.access('counting')],
codebase='llvm',
workdir=get_llvm_source_path(),
repourl='https://github.com/llvm/llvm-project.git',
branch=LLVM_BRANCHES[builder_type.llvm_branch].ref,
mode='incremental'))
# Always download the toolchains, even on platforms we don't need 'em
toolchains_dir = get_llvm_toolchains_path()
factory.addStep(MakeDirectory(name="Make CMake toolchain directory",
locks=[performance_lock.access('counting')],
dir=toolchains_dir,
haltOnFailure=False))
factory.addStep(FileDownload(name='Download CMake toolchains',
mastersrc='toolchain.linux-arm32.cmake', # relative to base dir
workerdest='toolchain.linux-arm32.cmake', # relative to workdir
workdir=toolchains_dir,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
mode=0o644))
# Determined by running `set` in cmd.exe before and after vcvarsall.bat
# and diffing the output. It's likely that we don't need all of these
# to make things work, but I haven't bothered to figure out what is irrelevant,
# so I'm erring on the side of maybe too much.
# noinspection SpellCheckingInspection
VCVARSALL_ENV_VARS = [
"COMMANDPROMPTTYPE",
"DEVENVDIR",
"EXTENSIONSDKDIR",
"FRAMEWORK40VERSION",
"FRAMEWORKDIR",
"FRAMEWORKDIR64",
"FRAMEWORKVERSION",
"FRAMEWORKVERSION64",
"INCLUDE",
"LIB",
"LIBPATH",
"NETFXSDKDIR",
"PATH",
"PLATFORM",
"UCRTVERSION",
"UNIVERSALCRTSDKDIR",
"VCIDEINSTALLDIR",
"VCINSTALLDIR",
"VCTOOLSINSTALLDIR",
"VCTOOLSREDISTDIR",
"VCTOOLSVERSION",
"VISUALSTUDIOVERSION",
"VS110COMNTOOLS",
"VS120COMNTOOLS",
"VS140COMNTOOLS",
"VS160COMNTOOLS",
"VSCMD_ARG_APP_PLAT",
"VSCMD_ARG_HOST_ARCH",
"VSCMD_ARG_TGT_ARCH",
"VSCMD_VER",
"VSINSTALLDIR",
"WINDOWSLIBPATH",
"WINDOWSSDKBINPATH",
"WINDOWSSDKDIR",
"WINDOWSSDKLIBVERSION",
"WINDOWSSDKVERBINPATH",
"WINDOWSSDKVERSION",
"WINDOWSSDK_EXECUTABLEPATH_X64",
"WINDOWSSDK_EXECUTABLEPATH_X86",
"__DOTNET_ADD_64BIT",
"__DOTNET_PREFERRED_BITNESS",
"__VSCMD_PREINIT_PATH",
"__VSCMD_SCRIPT_ERR_COUNT",
]
def get_msvc_config_steps(factory, builder_type):
# ensure that we use the x64 host compiler, not the x86 host compiler
arch_for_bits = {32: 'x64_x86', 64: 'x64'}
vcvarsall = 'vcvarsall.bat %s && set' % arch_for_bits[builder_type.bits]
# TODO: surely there is a better way of locating vcvarsall
# vcvarsdir = "c:/Program Files (x86)/Microsoft Visual Studio/2019/Community/VC/Auxiliary/Build"
vcvarsdir = "C:/Program Files/Microsoft Visual Studio/2022/Community/VC/Auxiliary/Build"
# `vcvarsall && set` dumps all the settings to stdout;
# we'll extract & save just the subset we think are likely to be relevant.
def save_interesting_env_vars(rc, stdout, stderr):
d = {}
for line in stdout.split('\n'):
match = re.match("^([a-zA-Z0-9_-]+)=(.*)$", line.strip())
if match:
key = match.group(1).upper()
value = match.group(2)
if key in VCVARSALL_ENV_VARS:
d[key] = value
return {'env': d}
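# Illustrative sketch (not part of the config): a hypothetical stdout line
# 'INCLUDE=C:\msvc\include' is captured as d['INCLUDE'] = 'C:\msvc\include'
# because INCLUDE appears in VCVARSALL_ENV_VARS; unlisted variables are dropped.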
factory.addStep(
SetPropertyFromCommand(name='Run VcVarsAll',
description='Run VcVarsAll',
workdir=vcvarsdir,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
command=vcvarsall,
extract_fn=save_interesting_env_vars))
def merge_renderable(_base, _extn):
@renderer
@defer.inlineCallbacks
def render(props):
base = yield props.render(_base)
extn = yield props.render(_extn)
base.update(extn)
return base
return render
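# Illustrative sketch (not part of the config): merge_renderable(base, extn)
# defers both renders until build time, e.g. merging a literal dict of settings
# with a renderer that computes worker-specific additions, then returning the
# base dict updated with the extension.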
def get_distrib_name(_version, target, ext):
@renderer
@defer.inlineCallbacks
def render(props):
rev = props.getProperty('got_revision')['halide']
version = yield props.render(_version)
return os.path.join(ARTIFACTS_DIR, f'Halide-{version}-{target}-{rev}.{ext}')
return render
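# Illustrative sketch (not part of the config): with version '16.0.6', target
# 'x86-64-linux', ext 'tar.gz', and a hypothetical got_revision of 'abc1234',
# this renders to '<ARTIFACTS_DIR>/Halide-16.0.6-x86-64-linux-abc1234.tar.gz'.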
def get_cmake_generator(builder_type):
return 'Ninja'
def get_llvm_cmake_options(builder_type):
options = []
return options
def get_halide_cmake_options(builder_type, build_dir):
options = []
if builder_type.sanitizer and builder_type.purpose != Purpose.llvm_nightly:
assert builder_type.handles_sanitizers()
options.append("--preset=%s" % builder_type.sanitizer_preset())
# append *after* preset so we override the build dir
options += ['-B', build_dir]
return options
def get_ctest_options(builder_type, build_dir):
assert builder_type.purpose != Purpose.llvm_nightly
if builder_type.sanitizer:
assert builder_type.handles_sanitizers()
# No, this won't work, see https://gitlab.kitware.com/cmake/cmake/-/issues/23982 --
# fortunately, we don't need to specify the current sanitizer toolchains
# at test time (just at configure time).
# return {'preset': builder_type.sanitizer_preset(), 'test_dir': build_dir}
return {'build_config': builder_type.sanitizer_preset()}
else:
return {'build_config': 'Release'}
def get_halide_cmake_definitions(builder_type, halide_target='host', wasm_jit='wabt'):
cmake_definitions = {
'Clang_DIR': get_llvm_install_path(builder_type, 'lib/cmake/clang'),
'CMAKE_INSTALL_PREFIX': get_halide_install_path(builder_type),
'Halide_TARGET': halide_target,
'LLD_DIR': get_llvm_install_path(builder_type, 'lib/cmake/lld'),
'LLVM_DIR': get_llvm_install_path(builder_type, 'lib/cmake/llvm'),
'LLVM_ROOT': get_llvm_install_path(builder_type),
'WITH_PYTHON_BINDINGS': 'ON' if builder_type.handles_python() else 'OFF',
'WITH_TEST_FUZZ': 'ON' if builder_type.sanitizer == 'fuzzer' else 'OFF'
}
if builder_type.sanitizer and builder_type.handles_sanitizers():
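# Sanitizer builds are assumed to get their build type from the CMake preset
# selected in get_halide_cmake_options(), so don't force Release here.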
pass
else:
cmake_definitions['CMAKE_BUILD_TYPE'] = 'Release'
# Sanitizer builds intermittently fail when using CCache for reasons that aren't
# clear ("precompiled header modified") -- for now, just ignore CCache for them
if builder_type.has_ccache() and not builder_type.sanitizer_preset():
cmake_definitions['Halide_CCACHE_BUILD'] = 'ON'
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
# Halide always uses its own toolchain files, from the cmake/ subdir.
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = get_halide_source_path('cmake', 'toolchain.linux-arm32.cmake')
if builder_type.os == 'windows':
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = Interpolate('%(prop:VCPKG_ROOT)s/scripts/buildsystems/vcpkg.cmake')
# CMake on Windows can't reliably find our pip-installed PyBind11 unless we point pybind11_DIR at it
cmake_definitions['pybind11_DIR'] = Interpolate('%(prop:VIRTUAL_ENV)s/share/cmake/pybind11')
# Don't bother with anything Python-related if we are targeting WebAssembly.
if "wasm" in halide_target:
cmake_definitions['WITH_PYTHON_BINDINGS'] = 'OFF'
# TODO: HALIDE_NODE_JS_PATH is only necessary until EMSDK updates their built-in version of Node
# to v16.13+; when that is done, remove this definition.
if builder_type.handles_wasm() and halide_target.startswith("wasm-"):
cmake_definitions['CMAKE_TOOLCHAIN_FILE'] = Interpolate(
'%(prop:EMSDK)s/upstream/emscripten/cmake/Modules/Platform/Emscripten.cmake')
cmake_definitions['NODE_JS_EXECUTABLE'] = Property('HALIDE_NODE_JS_PATH')
if wasm_jit == 'v8':
cmake_definitions['WITH_WABT'] = 'OFF'
cmake_definitions['WITH_V8'] = 'ON'
cmake_definitions['V8_INCLUDE_PATH'] = '/home/halidenightly/v8/v8/include'
cmake_definitions['V8_LIB_PATH'] = \
'/home/halidenightly/v8/v8/out/x64.release.static/obj/libv8_monolith.a'
elif wasm_jit == 'wabt':
cmake_definitions['WITH_WABT'] = 'ON'
cmake_definitions['WITH_V8'] = 'OFF'
cmake_definitions['V8_INCLUDE_PATH'] = ''
cmake_definitions['V8_LIB_PATH'] = ''
else:
assert False, "Unknown wasm jit " + str(wasm_jit)
if builder_type.handles_webgpu() and "webgpu" in halide_target:
# TODO(srj): remove these after https://github.com/halide/Halide/pull/7422 lands
cmake_definitions['WEBGPU_NODE_BINDINGS'] = Property('HL_WEBGPU_NODE_BINDINGS')
cmake_definitions['WEBGPU_NATIVE_LIB'] = Property('HL_WEBGPU_NATIVE_LIB')
if builder_type.handles_hexagon() and 'hvx' in halide_target:
cmake_definitions['Halide_BUILD_HEXAGON_REMOTE_RUNTIME'] = 'ON'
return cmake_definitions
def get_cmake_build_command(builder_type, build_dir, targets=None):
cmd = ['ninja',
'-C', build_dir,
'-j', Property('WORKER_BUILD_PARALLELISM')]
# TODO(srj): for debugging apps/c_backend
if builder_type.os == "windows":
cmd.append('-v')
if targets:
cmd.extend(targets)
return cmd
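# Illustrative sketch (not part of the config): on a worker whose
# WORKER_BUILD_PARALLELISM property renders to 8, a Linux build of the 'install'
# target runs roughly ['ninja', '-C', <build_dir>, '-j', '8', 'install'];
# Windows builds also get '-v' for verbose output.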
def get_llvm_cmake_definitions(builder_type):
# Keep sorted!
definitions = {
'CMAKE_BUILD_TYPE': 'Release',
'CMAKE_INSTALL_PREFIX': get_llvm_install_path(builder_type),
'LLVM_BUILD_32_BITS': ('ON' if builder_type.bits == 32 else 'OFF'),
'LLVM_ENABLE_ASSERTIONS': 'ON',
'LLVM_ENABLE_BINDINGS': 'OFF',
'LLVM_ENABLE_CURL': 'OFF',
'LLVM_ENABLE_DIA_SDK': 'OFF',
'LLVM_ENABLE_HTTPLIB': 'OFF',
'LLVM_ENABLE_IDE': 'OFF',
'LLVM_ENABLE_LIBXML2': 'OFF',
'LLVM_ENABLE_OCAMLDOC': 'OFF',
'LLVM_ENABLE_RTTI': 'ON',
'LLVM_ENABLE_TERMINFO': 'OFF',
'LLVM_ENABLE_WARNINGS': 'OFF', # silence them, it's not like we're gonna fix them
'LLVM_ENABLE_ZLIB': 'ON',
'LLVM_ENABLE_ZSTD': 'OFF',
'LLVM_INCLUDE_BENCHMARKS': 'OFF',
'LLVM_INCLUDE_EXAMPLES': 'OFF',
'LLVM_INCLUDE_TESTS': 'OFF',
'LLVM_TARGETS_TO_BUILD': 'X86;ARM;NVPTX;AArch64;Hexagon;PowerPC;WebAssembly',
}
if builder_type.bits == 32:
definitions['CMAKE_FIND_ROOT_PATH_MODE_INCLUDE'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_LIBRARY'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_PACKAGE'] = "ONLY"
definitions['CMAKE_FIND_ROOT_PATH_MODE_PROGRAM'] = "NEVER"
if builder_type.handles_riscv():
definitions['LLVM_TARGETS_TO_BUILD'] += ";RISCV"
if builder_type.handles_sanitizers():
definitions['LLVM_ENABLE_RUNTIMES'] = "compiler-rt;libcxx;libcxxabi;libunwind"
# We only need clang-tools-extra if building for sanitizers -- skip them
# if the builder will never do this, to save time & space.
definitions['LLVM_ENABLE_PROJECTS'] = "clang;lld;clang-tools-extra"
else:
definitions['LLVM_ENABLE_PROJECTS'] = "clang;lld"
# Some versions of GCC will flood the output with useless warnings about
# "parameter passing for argument of type foo changed in GCC 7.1" unless
# we disable this warning. This isn't *essential*, but it makes looking at the
# LLVM build logs much less noisy.
if builder_type.os != 'windows':
definitions['CMAKE_CXX_FLAGS'] = '-Wno-psabi'
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
# LLVM doesn't provide a toolchain file, and we can't/don't-want-to rely on the
# one from Halide, so we'll rely on one that the buildbot downloads to each worker.
# (Note that this assumes the file has been properly downloaded.)
definitions['CMAKE_TOOLCHAIN_FILE'] = get_llvm_toolchains_path('toolchain.linux-arm32.cmake')
definitions['LLVM_TARGET_ARCH'] = 'ARM'
definitions['LLVM_DEFAULT_TARGET_TRIPLE'] = 'arm-linux-gnueabihf'
if builder_type.arch == 'x86' and builder_type.bits == 32 and builder_type.os == 'linux':
definitions['CMAKE_FIND_ROOT_PATH'] = '/usr/lib/i386-linux-gnu'
definitions['CMAKE_FIND_ROOT_PATH_MODE_LIBRARY'] = 'ONLY'
# This disables an XCode setting that can get enabled by default
# when assertions are enabled, but only if your XCode install has
# certain frameworks installed; we want it disabled, as it prevents
# prebuilt libraries from working properly with XCode 9.x.
if builder_type.os == 'osx':
definitions['LLVM_ENABLE_SUPPORT_XCODE_SIGNPOSTS'] = 'FORCE_OFF'
# We never build LLVM with sanitizers enabled
if builder_type.has_ccache():
definitions['LLVM_CCACHE_BUILD'] = 'ON'
return definitions
def extend_property(dict_name, **kwargs):
@renderer
def render(props):
table = props.getProperty(dict_name, default={})
table.update(kwargs)
return table
return render
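# Illustrative sketch (not part of the config): extend_property('env', CC='cc')
# returns a renderer that, at render time, yields the current 'env' property
# dict with CC overridden; nothing is merged until the step actually runs.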
def add_env_setup_step(factory, builder_type, enable_ccache=False):
if builder_type.os == 'windows':
# do this first because the SetPropertyFromCommand step isn't smart enough to merge
get_msvc_config_steps(factory, builder_type)
cxx = 'c++'
cc = 'cc'
ld = 'ld'
if builder_type.os == 'linux':
cc = 'gcc-9'
cxx = 'g++-9'
ld = 'ld'
if builder_type.arch == 'x86' and builder_type.bits == 32:
cxx += ' -m32'
cc += ' -m32'
ld += ' -melf_i386'
elif builder_type.os == 'windows':
cxx = 'cl.exe'
cc = 'cl.exe'
# This is only necessary (or desirable) for make-based builds of Halide;
# CMake-based builds handle it via Halide_CCACHE_BUILD and/or LLVM_CCACHE_BUILD
if enable_ccache and builder_type.has_ccache():
cxx = 'ccache ' + cxx
cc = 'ccache ' + cc
env = {
'CC': cc,
'CXX': cxx,
'LD': ld,
}
# TODO: HALIDE_NODE_JS_PATH is only necessary until EMSDK updates their built-in version of Node
# to v16.13+; when that is done, remove HALIDE_NODE_JS_PATH here and on the workers.
factory.addStep(SetPropertiesFromEnv(name='Read worker environment',
variables=['EMSDK',
'HALIDE_NODE_JS_PATH',
'HL_HEXAGON_TOOLS',
'HL_WEBGPU_NATIVE_LIB',
'HL_WEBGPU_NODE_BINDINGS',
'LD_LIBRARY_PATH',
'VIRTUAL_ENV',
'VCPKG_ROOT']))
vcpkg_root = Property('VCPKG_ROOT', default=None)
if builder_type.handles_hexagon():
# Environment variables for testing Hexagon DSP
hexagon_remote_bin = get_halide_build_path('src', 'runtime', 'hexagon_remote')
# Assume that HL_HEXAGON_TOOLS points to the correct directory (it might not be /usr/local/hexagon)
env['HL_HEXAGON_SIM_REMOTE'] = Transform(os.path.join,
hexagon_remote_bin,
'hexagon',
'bin',
'hexagon_sim_remote')
env['HL_HEXAGON_SIM_CYCLES'] = '1'
env['LD_LIBRARY_PATH'] = [
# no, this will cause a failure at runtime if LD_LIBRARY_PATH is unset (or empty!)
# Property('LD_LIBRARY_PATH'),
hexagon_remote_bin,
Interpolate('%(prop:HL_HEXAGON_TOOLS)s/lib/iss'),
]
env['HEXAGON_SDK_ROOT'] = Interpolate('%(prop:HL_HEXAGON_TOOLS)s/../../../..')
# Force Vulkan validation layer on to catch any driver related errors
# ... this enables a suite of diagnostic checks implemented in the Vulkan SDK
# that verifies the driver and application conform to the Vulkan runtime
# specification. This should not be enabled in production due to the overhead,
# but we want to catch any changes in driver behaviour and/or spurious errors that
# may be hard to find (but easy to fix if the right error messages are present)
if builder_type.has_nvidia() and builder_type.handles_vulkan():
env['VK_INSTANCE_LAYERS'] = "VK_LAYER_KHRONOS_validation"
if builder_type.os == 'osx':
# Environment variable for turning on Metal API validation
# This will have no effect on CPU testing, just Metal testing
env['METAL_DEVICE_WRAPPER_TYPE'] = '1'
if builder_type.os == 'windows':
vcpkg_root = Property('VCPKG_ROOT', default='C:/vcpkg')
env['VCPKG_ROOT'] = vcpkg_root
# Current NVidia drivers on our Windows buildbots can corrupt their own
# cache, leading to many spurious failures. Disable the cache
# for now, pending NVidia investigation.
env['CUDA_CACHE_DISABLE'] = '1'
# We don't ever want an Abort, Retry, Ignore dialog in our tests
env['HL_DISABLE_WINDOWS_ABORT_DIALOG'] = '1'
# Leaving this here (but commented out) in case we need to temporarily
# disable leak-checking in the future.
#
# if builder_type.handles_sanitizers():
# # Disable leak-checking (for now) for ASAN builds
# env['ASAN_OPTIONS'] = 'detect_leaks=0'
factory.addStep(SetProperties(
name='Initialize environment',
properties=dict(
env=extend_property('env', **env),
VCPKG_ROOT=vcpkg_root)))
@renderer
def get_llvm_latest_commit(props):
# Note that this property is a dict for multi-codebase builds,
# but just a string for single-codebase builds.
build_dir = props.getProperty('builddir')
assert not isinstance(build_dir, dict)
build_dir = build_dir.replace('\\', '/')
# Can't use got_revision here since we may be using git directly.
return "cd %s/llvm-project && git log -1 > %s/llvm-install/llvm_latest_commit.txt" % (build_dir, build_dir)
def add_llvm_steps(factory, builder_type, clean_rebuild):
build_dir = get_llvm_build_path()
install_dir = get_llvm_install_path(builder_type)
llvm_name = str(LLVM_BRANCHES[builder_type.llvm_branch].version.major)
if clean_rebuild:
factory.addStep(RemoveDirectory(name="Remove LLVM %s Build Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(RemoveDirectory(name="Remove LLVM %s Install Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make LLVM %s Build Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make LLVM %s Install Dir" % llvm_name,
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(
CMake(name='Configure LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
env=Property('env'),
workdir=build_dir,
path=get_llvm_source_path('llvm'),
generator=get_cmake_generator(builder_type),
definitions=get_llvm_cmake_definitions(builder_type),
options=get_llvm_cmake_options(builder_type)))
factory.addStep(
ShellCommand(name='Build LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=Property('env'),
command=get_cmake_build_command(builder_type, build_dir, targets=['install'])))
# Save the SHA of LLVM's head rev into ${INSTALL}/llvm_version.txt,
# just to make debugging simpler
#
factory.addStep(
ShellCommand(name='Stamp Install Directory for LLVM %s' % llvm_name,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=get_llvm_source_path(),
env=Property('env'),
command=get_llvm_latest_commit))
def add_halide_cmake_build_steps(factory, builder_type):
# Always do a clean build for Halide
source_dir = get_halide_source_path()
build_dir = get_halide_build_path()
install_dir = get_halide_install_path(builder_type)
factory.addStep(RemoveDirectory(name="Remove Halide Build Dir",
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make Halide Build Dir",
locks=[performance_lock.access('counting')],
dir=build_dir,
haltOnFailure=False))
factory.addStep(RemoveDirectory(name="Remove Halide Install Dir",
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(MakeDirectory(name="Make Halide Install Dir",
locks=[performance_lock.access('counting')],
dir=install_dir,
haltOnFailure=False))
factory.addStep(CMake(name='Configure Halide',
description='Configure Halide',
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=Property('env'),
path=source_dir,
generator=get_cmake_generator(builder_type),
definitions=get_halide_cmake_definitions(builder_type),
options=get_halide_cmake_options(builder_type, build_dir)))
factory.addStep(
ShellCommand(name='Build Halide',
description='Build Halide',
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=Property('env'),
command=get_cmake_build_command(builder_type, build_dir, targets=['all', 'install'])))
def add_halide_cmake_package_steps(factory, builder_type):
source_dir = get_halide_source_path()
target = builder_type.halide_target()
ext = 'zip' if builder_type.os == 'windows' else 'tar.gz'
factory.addStep(
SetPropertiesFromCMakeCache(
name='Get Halide package version',
workdir=get_halide_build_path(),
props=['CMAKE_PROJECT_VERSION']))
extra_env = dict(
Clang_DIR=get_llvm_install_path(builder_type, 'lib/cmake/clang'),
LLD_DIR=get_llvm_install_path(builder_type, 'lib/cmake/lld'),
LLVM_DIR=get_llvm_install_path(builder_type, 'lib/cmake/llvm'),
Halide_VERSION=Property('CMAKE_PROJECT_VERSION')
)
if builder_type.os == 'windows':
# TODO: on Windows, we can't use Ninja for packaging (as we do everywhere
# else in this cfg) due to a bug in CMake 3.18, so we must use MSBuild;
# that means we must use a different build directory entirely. To simplify the
# world, we make this a subdir of the real build dir (so it gets cleaned properly).
# https://github.com/halide/Halide/issues/5264
build_dir = get_halide_build_path("packaging_dir")
if builder_type.arch == 'arm':
arch = 'ARM' if builder_type.bits == 32 else 'ARM64'
else:
arch = 'Win32' if builder_type.bits == 32 else 'x64'
cmd = [get_halide_source_path('packaging/zip/package.bat'), source_dir, build_dir, arch]
else:
build_dir = get_halide_build_path()
cmd = [get_halide_source_path('packaging/tgz/package.sh'), source_dir, build_dir]
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
extra_env['CMAKE_TOOLCHAIN_FILE'] = get_halide_source_path('cmake', 'toolchain.linux-arm32.cmake')
factory.addStep(
ShellCommand(name='Package Halide',
description='Package Halide',
workdir=build_dir,
env=extend_property('env', **extra_env),
locks=[performance_lock.access('counting')],
haltOnFailure=True,
command=cmd))
factory.addStep(
FileUpload(name='Upload Halide package',
workersrc=Interpolate(f'Halide-%(prop:CMAKE_PROJECT_VERSION)s-{target}.{ext}'),
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
mode=0o644,
masterdest=get_distrib_name(Property('CMAKE_PROJECT_VERSION'), target, ext)))
def pkg_version_and_target(path: Path):
# Archive names are formatted like: Halide-[version]-[arch]-[commit].[ext]
# This grabs "Halide-[version]-[arch]".
match = re.match(r'^(.*)-[a-f0-9]+\.(tar\.gz|tgz|zip)', path.name)
return match.group(1) if match else None
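# Illustrative sketch (not part of the config): a hypothetical
# Path('Halide-16.0.6-x86-64-linux-abc1234.tar.gz') groups under
# 'Halide-16.0.6-x86-64-linux', so stale archives for the same version/target
# are cleaned together.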
factory.addStep(CleanOldFiles(
name='Clean old releases',
workdir=ARTIFACTS_DIR,
locks=[performance_lock.access('counting')],
groupfn=pkg_version_and_target))
# Figure out which "non-cpu" (GPU, DSP, etc) targets this builder can handle.
# Return (target, is_simulator)
def get_gpu_dsp_targets(builder_type):
if builder_type.sanitizer_preset() is not None:
return
if builder_type.has_nvidia():
yield 'host-cuda', False
yield 'host-opencl', False
# TODO: temporarily disabled because our only windows bot doesn't support it...
# if builder_type.os == 'windows':
# yield 'host-d3d12compute', False
# If we're running on a capable GPU, add all optional feature flags to the vulkan target
# which are required to get all the correctness tests to pass
if builder_type.handles_vulkan():
yield 'host-vulkan-vk_int8-vk_int16-vk_int64-vk_float16-vk_float64-vk_v13', False
if builder_type.handles_webgpu():
yield 'host-webgpu', False
if builder_type.os == 'osx':
yield 'host-metal', False
if builder_type.handles_hexagon():
# All the buildbots use a simulator for HVX, so performance tests
# won't be useful
yield 'host-hvx', True
# Return a dict with halide-targets as the keys, and a list of test-labels for each value.
def get_test_labels(builder_type):
targets = defaultdict(list)
preset = builder_type.sanitizer_preset()
# For the fuzz sanitizer, run only the fuzz tests
if preset and 'fuzz' in preset:
targets['host'].extend(['fuzz'])
return targets
targets['host'].extend(['internal', 'correctness', 'generator',
'autoschedulers_cpu', 'error', 'warning', 'apps', 'performance', 'tutorial'])
# For all other sanitizers (eg asan), don't bother with the gpu/etc tests.
if preset:
return targets
# TODO: some JIT+generator tests are failing on arm32; disable for now
# pending fixes (see https://github.com/halide/Halide/issues/4940)
if builder_type.arch == 'arm' and builder_type.bits == 32 and builder_type.os == 'linux':
targets['host'].remove('internal')
targets['host'].remove('generator')
if builder_type.handles_python():
targets['host'].extend(['python'])
# Test without SSE4.1 on all x86 systems
if builder_type.arch == 'x86':
t = 'x86-%d-%s' % (builder_type.bits, builder_type.os)
targets[t].extend(['correctness'])
# on x86-64, also test with SSE4.1 (but nothing else that 'host' might sniff)
if builder_type.bits == 64:
targets['%s-sse41' % t].extend(['correctness'])
# Test a subset of things on GPU/DSP targets, as appropriate
for t, is_simulator in get_gpu_dsp_targets(builder_type):
# TODO(https://github.com/halide/Halide/issues/7420): disable apps for host-gpu until the errors are resolved
if t == 'host-webgpu':
targets[t].extend(['correctness', 'generator'])
else:
targets[t].extend(['correctness', 'generator', 'apps'])
if 'cuda' in t:
targets[t].extend(['autoschedulers_cuda'])
if 'hvx' not in t:
targets[t].extend(['autoschedulers_gpu'])
# Don't do performance testing on simulators.
if not is_simulator:
targets[t].extend(['performance'])
# Handle this special case separately
if builder_type.has_nvidia():
targets['host-cuda-opencl'].extend(['correctness_multi_gpu'])
# If we're running on a capable GPU, add all optional feature flags to the vulkan target
# which are required to get all the correctness tests to pass
if builder_type.handles_vulkan():
targets['host-vulkan-vk_int8-vk_int16-vk_int64-vk_float16-vk_float64-vk_v13'].extend(
['internal', 'correctness', 'generator', 'error', 'warning'])
if builder_type.handles_wasm():
if builder_type.handles_wasm_wabt():
# TODO: this is a horrid hack. For now, we want to test JIT with both WABT and V8.
# Add as a horrible wart on the target string.
targets['wasm-32-wasmrt-wasm_simd128-wasm_signext-wasm_sat_float_to_int/wabt'].extend(
['internal', 'correctness', 'generator', 'error', 'warning'])
if builder_type.handles_wasm_v8():
# TODO: this is a horrid hack. For now, we want to test JIT with both WABT and V8.
# Add as a horrible wart on the target string.
targets['wasm-32-wasmrt-wasm_simd128-wasm_signext-wasm_sat_float_to_int/v8'].extend(
['internal', 'correctness', 'generator', 'error', 'warning'])
# WABT (and thus WASM JIT) can't handle code built with wasm_threads yet,
# so only test Generator here
targets['wasm-32-wasmrt-wasm_simd128-wasm_signext-wasm_sat_float_to_int-wasm_threads'].extend(
['generator', 'apps'])
if builder_type.handles_webgpu():
# Most apps can't handle wasm builds yet.
targets['wasm-32-wasmrt-webgpu'].extend(['generator'])
return targets
def is_time_critical_test(test):
# Return true if the test label (or single-test name) is 'time critical' and must
# be run with an exclusive lock on the buildbot (typically, performance tests)
return test in ['performance', 'autoschedulers_cpu', 'autoschedulers_gpu', 'autoschedulers_cuda']
def short_target(halide_target):
s = halide_target.split('-')
if len(s) == 1:
return s[0]
elif len(s) == 2:
return '%s-%s' % (s[0], s[1])
elif len(s) == 3:
return '%s-%s-%s' % (s[0], s[1], s[2])
elif len(s) > 3:
return '%s-%s-%s…' % (s[0], s[1], s[2])
else:
return '<unknown>'
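# For example (illustrative only):
#   short_target('host')                     -> 'host'
#   short_target('x86-64-linux')             -> 'x86-64-linux'
#   short_target('host-cuda-opencl-profile') -> 'host-cuda-opencl…'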
def add_halide_cmake_test_steps(factory, builder_type):
parallelism = Property('WORKER_BUILD_PARALLELISM')
labels = get_test_labels(builder_type)
source_dir = get_halide_source_path()
build_dir = get_halide_build_path()
install_dir = get_halide_install_path(builder_type) # NOQA
# Since we need to do at least a partial rebuild for each different target,
# we want to group things by target. Do host first, followed by a key-sorted
# order, to ensure predictability.
keys = list(labels.keys())
keys.remove('host')
keys.sort()
keys.insert(0, 'host')
for halide_target in keys:
# HL_TARGET is now ignored by CMake builds, no need to set
# (must specify -DHalide_TARGET to CMake instead)
# env['HL_TARGET'] = halide_target
env = extend_property('env', HL_JIT_TARGET=halide_target)
desc = 'T=%s' % short_target(halide_target)
# Do this *before* splitting the horrid wasm-specific target string
test_labels = labels[halide_target]
# wasm targets must ensure that the EMSDK (emcc, etc.) is added to the
# active env.
wasm_jit = None
if halide_target.startswith("wasm-"):
halide_target, sep, wasm_jit = halide_target.partition('/')
# Re-set HL_JIT_TARGET with the de-warted target string
env = extend_property('env', HL_JIT_TARGET=halide_target)
if wasm_jit:
desc = '%s + T=%s' % (wasm_jit, short_target(halide_target))
if not wasm_jit:
wasm_jit = 'wabt'
factory.addStep(
CMake(name='Reconfigure for %s' % short_target(halide_target),
description='Reconfigure for %s' % desc,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
env=env,
workdir=build_dir,
path=source_dir,
generator=get_cmake_generator(builder_type),
definitions=get_halide_cmake_definitions(
builder_type, halide_target=halide_target, wasm_jit=wasm_jit),
options=get_halide_cmake_options(builder_type, build_dir)))
factory.addStep(
ShellCommand(name='Rebuild for %s' % (short_target(halide_target)),
description='Rebuild Halide for %s' % desc,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=build_dir,
env=env,
command=get_cmake_build_command(builder_type, build_dir, targets=['all', 'install'])))
do_apps = 'apps' in test_labels
if do_apps:
test_labels.remove('apps')
if not builder_type.handles_python():
if 'python' in test_labels:
test_labels.remove('python')
# TODO: some of the apps require python, so we must skip them for now also
do_apps = False
parallel_test_labels = [
test for test in test_labels if not is_time_critical_test(test)]
exclusive_test_labels = [test for test in test_labels if is_time_critical_test(test)]
if parallel_test_labels:
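# Abbreviate long label lists to first letters to keep step names short,
# e.g. ['internal', 'correctness', 'generator'] -> 'i,c,g'.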
if len(parallel_test_labels) > 2:
test_set = ','.join([s[0] for s in parallel_test_labels])
else:
test_set = ', '.join(parallel_test_labels)
# Build up some special cases to exclude
exclude_tests = []
if builder_type.os == 'windows' or builder_type.os == 'linux':
# TODO: disable lens_blur on windows for now due to
# https://bugs.llvm.org/show_bug.cgi?id=46176
# and also because the Windows and Linux testbots have inadequate GPU RAM
exclude_tests.append('interpolate')
exclude_tests.append('lens_blur')
exclude_tests.append('unsharp')
if builder_type.os == 'linux' or builder_type.bits == 32:
# TODO: disable tutorial_lesson_12_using_the_gpu (both C++ and python) on linux and 32-bit
exclude_tests.append('tutorial_lesson_12')
if builder_type.sanitizer == 'asan':
# lesson 19 can trigger memory leaks in some of the GPU device drivers,
# so just exclude it when doing ASAN
exclude_tests.append('tutorial_lesson_19')
if builder_type.arch == 'arm' or builder_type.bits == 32:
# TODO: disable lesson_19 on arm32
# https://github.com/halide/Halide/issues/5224
exclude_tests.append('tutorial_lesson_19')
factory.addStep(
CTest(name='Test %s %s' % (test_set, desc),
description='Test %s %s' % (test_set, desc),
locks=[performance_lock.access('counting')],
workdir=build_dir,
env=env,
timeout=3600,
labels=parallel_test_labels,
exclude_tests=exclude_tests,
jobs=parallelism,
**get_ctest_options(builder_type, build_dir)))
if exclusive_test_labels:
test_set = ','.join([s.replace('autoschedulers_', 'a_') for s in exclusive_test_labels])
factory.addStep(
CTest(name='Test %s %s' % (test_set, desc),
description='Test %s %s' % (test_set, desc),
locks=[performance_lock.access('exclusive')],
workdir=build_dir,
env=env,
timeout=3600,
labels=exclusive_test_labels,
**get_ctest_options(builder_type, build_dir)))
if do_apps:
apps_build_dir = get_halide_build_path("apps")
apps_source_dir = get_halide_source_path("apps")
# We currently don't attempt to build any of the apps with wasm
apps_cmake_defs = get_halide_cmake_definitions(builder_type, halide_target=halide_target)
apps_cmake_defs['CMAKE_PREFIX_PATH'] = get_halide_install_path(builder_type)
# apps/hannk is expensive to build, and doesn't (yet) build on all systems, so special-case it here
want_hannk = (builder_type.has_tflite() and not halide_target.startswith("wasm-"))
apps_cmake_defs['ENABLE_APPS_HANNK'] = 'ON' if want_hannk else 'OFF'
factory.addStep(
CMake(name='Configure apps for %s' % desc,
description='Configure apps for %s' % desc,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
env=env,
workdir=apps_build_dir,
path=apps_source_dir,
generator=get_cmake_generator(builder_type),
definitions=apps_cmake_defs,
options=get_halide_cmake_options(builder_type, build_dir)))
factory.addStep(
ShellCommand(name='Build apps for %s' % desc,
description='Build apps for %s' % desc,
locks=[performance_lock.access('counting')],
haltOnFailure=True,
workdir=apps_build_dir,
env=env,
command=get_cmake_build_command(builder_type, apps_build_dir)))
# Note: do *not* run the apps/ tests in parallel; many of them expect
# to make full use of all cores, and running in parallel will just slow
# things down.
exclude_tests = []
if builder_type.os == 'windows':
# TODO: disable lens_blur_filter on windows for now due to
# https://github.com/halide/Halide/issues/5552
exclude_tests.append('lens_blur_filter')
factory.addStep(
CTest(name='Test apps for %s' % desc,
description='Test apps for %s' % desc,
locks=[performance_lock.access('exclusive')],
workdir=apps_build_dir,
env=env,
timeout=3600,
exclude_tests=exclude_tests,
exclude_labels=['slow_tests'],
**get_ctest_options(builder_type, apps_build_dir)))
def create_halide_make_factory(builder_type):
assert builder_type.os != 'windows'
make_threads = Property('WORKER_BUILD_PARALLELISM')
build_dir = get_halide_build_path()
factory = BuildFactory()
# We never enable sanitizers for Make builds here (only for CMake)
add_env_setup_step(factory, builder_type, enable_ccache=True)
# It's never necessary to use get_msvc_config_steps() for Make,
# since we never use Make with MSVC
add_get_halide_source_steps(factory, builder_type)
# Force a full rebuild of Halide every time
factory.addStep(RemoveDirectory(name="Remove Halide Build Dir",
locks=[performance_lock.access('counting')],
dir=build_dir))
target_label_pairs = [('host', 'build_tests')]
for halide_target, labels_for_target in get_test_labels(builder_type).items():
# For Make we skip every target that isn't plain 'host'
if halide_target != 'host':
continue
_labels_to_skip = [
# auto_schedule and performance require exclusive machine access and aren't worth it for Make
"autoschedulers_cpu",
"autoschedulers_gpu",
"autoschedulers_cuda",
"performance",
# Make no longer provides support for building the Python bindings,
# regardless of builder_type.handles_python()
"python",
]
if builder_type.bits == 32:
# Don't test autoschedulers on 32-bit systems via Make;
# it's not set up 100% correctly for crosscompilation there
# and the CMake-based coverage is fine.
_labels_to_skip.extend(['autoschedulers_cpu', 'autoschedulers_gpu', 'autoschedulers_cuda'])
for label in labels_for_target:
if label in _labels_to_skip:
continue
target_label_pairs.append((halide_target, label))
for halide_target, label in target_label_pairs:
env = extend_property('env',
LLVM_CONFIG=get_llvm_install_path(builder_type, 'bin/llvm-config'),
HL_TARGET=halide_target,
HL_JIT_TARGET=halide_target)
if is_time_critical_test(label):
p = 1
lock_mode = 'exclusive'
else:
p = make_threads
lock_mode = 'counting'
if label != 'build_tests':
label = 'test_%s' % label
factory.addStep(ShellCommand(name='make ' + label,
description=label + ' ' + halide_target,
locks=[performance_lock.access(lock_mode)],
workdir=build_dir,
env=env,
haltOnFailure=False,
command=['make',
'-f', get_halide_source_path('Makefile'),
'-j', p,
label],
timeout=3600))
return factory
def create_halide_cmake_factory(builder_type):
factory = BuildFactory()
add_env_setup_step(factory, builder_type)
add_get_halide_source_steps(factory, builder_type)
add_halide_cmake_build_steps(factory, builder_type)
add_halide_cmake_test_steps(factory, builder_type)
# If everything else looks ok, build a distrib.
if builder_type.purpose == Purpose.halide_nightly:
add_halide_cmake_package_steps(factory, builder_type)
return factory
def create_halide_factory(builder_type):
if builder_type.buildsystem == BuildSystem.cmake:
return create_halide_cmake_factory(builder_type)
else:
return create_halide_make_factory(builder_type)
def get_interesting_halide_targets():
for arch in ['arm', 'x86']:
for bits in [32, 64]:
for os in ['linux', 'osx', 'windows']:
if arch == 'arm' and os == 'windows':
# No buildbots for windows-on-arm (yet)
continue
if os == 'osx' and bits != 64:
# osx is 64-bit only, period
continue
yield arch, bits, os
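# This yields eight (arch, bits, os) triples: {arm, x86} x {32, 64} x
# {linux, osx, windows}, minus windows-on-arm and the 32-bit osx variants,
# e.g. ('arm', 64, 'osx') and ('x86', 32, 'linux').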
def create_halide_builder(arch, bits, os, halide_branch, llvm_branch, purpose, buildsystem=BuildSystem.cmake):
# Always do a build with no sanitizers
sanitizers = [None]
# Also build with sanitizers (but not if we are doing nightlies)
if purpose != Purpose.halide_nightly:
sanitizers.extend(_SANITIZERS)
for san in sanitizers:
builder_type = BuilderType(arch, bits, os, halide_branch, llvm_branch, purpose, san, buildsystem)
if san and purpose == Purpose.llvm_nightly:
continue
if san and not builder_type.handles_sanitizers():
continue
workers = builder_type.get_worker_names()
builder = BuilderConfig(name=builder_type.builder_label(),
workernames=workers,
factory=create_halide_factory(builder_type),
collapseRequests=True,
# We need counting access to our llvm branch during Halide builds.
# (We could probably get by with access during only a subset of
# our steps, but there doesn't appear to be a way to group
# lock requests across multiple-but-not-all-steps in a Build.)
locks=[llvm_build_locks[llvm_branch + str(bits)].access('counting')],
tags=builder_type.builder_tags())
builder.builder_type = builder_type
yield builder
def create_halide_builders():
for arch, bits, os in get_interesting_halide_targets():
# Create builders for build + package of Halide master + release branches
# (but only against their 'native' LLVM versions)
for halide_branch in HALIDE_NIGHTLIES:
for llvm_branch in LLVM_FOR_HALIDE[halide_branch]:
yield from create_halide_builder(arch, bits, os, halide_branch, llvm_branch, Purpose.halide_nightly)
# Create the builders for testing pull requests to releases.
for halide_branch in _HALIDE_RELEASES:
for llvm_branch in LLVM_FOR_HALIDE[halide_branch]:
yield from create_halide_builder(arch, bits, os, halide_branch, llvm_branch, Purpose.halide_testbranch)
# Create the builders for testing pull requests to main.
yield from create_halide_builder(arch, bits, os, HALIDE_MAIN, LLVM_MAIN, Purpose.halide_testbranch)
# Also test Makefiles on x86-linux & osx (but only on Halide main) to ensure they
# stay healthy. (Note: deliberately skip arm-linux, since they are the slowest bots.)
yield from create_halide_builder('x86', 64, 'linux', HALIDE_MAIN, LLVM_MAIN,
Purpose.halide_testbranch, BuildSystem.make)
yield from create_halide_builder('x86', 32, 'linux', HALIDE_MAIN, LLVM_MAIN,
Purpose.halide_testbranch, BuildSystem.make)
yield from create_halide_builder('x86', 64, 'osx', HALIDE_MAIN, LLVM_MAIN,
Purpose.halide_testbranch, BuildSystem.make)
yield from create_halide_builder('arm', 64, 'osx', HALIDE_MAIN, LLVM_MAIN,
Purpose.halide_testbranch, BuildSystem.make)
# Test pull requests for Halide master against the current and previous LLVM, for at least one target.
for llvm_branch in LLVM_BRANCHES:
if abs(LLVM_BRANCHES[llvm_branch].version.major - LLVM_BRANCHES[LLVM_MAIN].version.major) in [1, 2]:
yield from create_halide_builder('x86', 64, 'linux', HALIDE_MAIN, llvm_branch, Purpose.halide_testbranch)
def create_halide_scheduler(halide_branch):
def is_halide_base_branch(br):
return any(br == hl.ref for hl in HALIDE_BRANCHES.values())
def is_halide_pr_branch(br):
# If it's not one of the well-known branches, assume it's a pull request
return not is_halide_base_branch(br)
def github_base_branch_matches(change):
ref = change.properties.getProperty('basename')
return ref == HALIDE_BRANCHES[halide_branch].ref
# ----- nightlies
builders = [b for b in c['builders']
if b.builder_type.halide_branch == halide_branch and b.builder_type.purpose == Purpose.halide_nightly]
if builders:
builder_names = [str(b.name) for b in builders]
# Start the Halide nightlies at 9PM Pacific; our buildbot master uses UTC for
# cron, so that's 0400. Note that this is (deliberately) well before
# the LLVM nightlies get built (currently 11pm start); the idea is
# that Halide nightlies get built using the previous day's LLVM
# nightlies, on the assumption that those are more likely to get at
# least some test coverage (via testbranch) to minimize breakage.
yield Nightly(
name='halide-package-' + halide_branch,
codebases=['halide'],
builderNames=builder_names,
change_filter=ChangeFilter(codebase='halide'),
hour=4,
minute=0)
yield ForceScheduler(
name='force-halide-nightly-' + halide_branch,
builderNames=builder_names,
codebases=['halide'])
# ----- testbranch
builders = [b for b in c['builders']
if b.builder_type.halide_branch == halide_branch
and b.builder_type.purpose == Purpose.halide_testbranch]
if builders:
# NOT SingleBranchScheduler, because this can process changes from many branches (all PRs)
builder_names = [str(b.name) for b in builders]
yield AnyBranchScheduler(
name='halide-testbranch-' + halide_branch,
codebases=['halide'],
change_filter=ChangeFilter(category='pull', codebase='halide',
branch_fn=is_halide_pr_branch,
filter_fn=github_base_branch_matches),
treeStableTimer=60 * 5, # seconds
builderNames=builder_names)
yield ForceScheduler(
name='force-halide-testbranch-' + halide_branch,
builderNames=builder_names,
codebases=['halide'])
def create_llvm_cmake_factory(builder_type):
factory = BuildFactory()
add_env_setup_step(factory, builder_type)
add_get_llvm_source_steps(factory, builder_type)
clean_llvm_rebuild = (builder_type.llvm_branch == LLVM_MAIN)
add_llvm_steps(factory, builder_type, clean_llvm_rebuild)
return factory
def create_llvm_builders():
for arch, bits, os in get_interesting_halide_targets():
# Note that we want these Builders to run on *every* eligible worker;
# the goal is to ensure that all LLVM builds are updated locally
# on all of the workers.
for llvm_branch in LLVM_BRANCHES:
builder_type = BuilderType(arch, bits, os, None, llvm_branch, Purpose.llvm_nightly)
for w in builder_type.get_worker_names():
# Note that we need the builder name to be unique across workers,
# but we want the builddir on the *worker* side to be the same for all workers
# (to simplify things).
label = builder_type.llvm_builder_label()
builder = BuilderConfig(name="%s/%s" % (label, w),
workerbuilddir=label,
workernames=[w],
factory=create_llvm_cmake_factory(builder_type),
collapseRequests=True,
# We want exclusive access to this workerlock
# thru all this Builder's steps. (We could probably
# get by with holding it just during the install phase,
# but we'd have to finesse some details like removing
# the old install directory within the lock, and this
# is much simpler.)
locks=[llvm_build_locks[llvm_branch + str(bits)].access('exclusive')],
tags=builder_type.builder_tags())
builder.builder_type = builder_type
yield builder
def create_llvm_scheduler(llvm_branch):
builders = [str(b.name) for b in c['builders']
if b.builder_type.llvm_branch == llvm_branch and b.builder_type.purpose == Purpose.llvm_nightly]
# Start every day at 11PM Pacific; our buildbot master uses UTC for cron, so that's 0600
yield Nightly(
name=f'llvm-nightly-{LLVM_BRANCHES[llvm_branch].version.major}',
codebases=['llvm'],
builderNames=builders,
change_filter=ChangeFilter(codebase='llvm'),
hour=6,
minute=0)
for b in builders:
yield ForceScheduler(
name='force-llvm-nightly-' + b.replace('/', '_'),
codebases=['llvm'],
builderNames=[b])
def create_builders():
yield from create_llvm_builders()
yield from create_halide_builders()
def create_schedulers():
for llvm_branch in LLVM_BRANCHES:
yield from create_llvm_scheduler(llvm_branch)
for halide_branch in HALIDE_BRANCHES:
yield from create_halide_scheduler(halide_branch)
c['builders'] = list(create_builders())
c['schedulers'] = list(create_schedulers())
# Set the builder priorities
def prioritize_builders(buildmaster, builders):
def importance(builder):
builder_type = builder.config.builder_type
assert builder_type
# LLVM nightlies run only once a day (late at night) and should always
# get priority over everything else.
if builder_type.purpose == Purpose.llvm_nightly:
return 0
# Branch testers all need to come back before we can merge a PR,
# so they all have equal next-highest priority.
if builder_type.purpose == Purpose.halide_testbranch:
return 1
# non-branch testers are mostly used for bisecting failures that
# didn't show up in the branch testers and doing binary
# releases. We care most about the most recently-released llvm so
# that we have a full set of builds for releases, then llvm main
# for bisection, then older llvm versions.
if builder_type.llvm_branch in LLVM_FOR_HALIDE[HALIDE_RELEASE_15]:
return 2
if builder_type.llvm_branch in LLVM_FOR_HALIDE[HALIDE_RELEASE_16]:
return 2
if builder_type.llvm_branch in LLVM_FOR_HALIDE[HALIDE_MAIN]:
return 3
return 4
return list(sorted(builders, key=importance))
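# Example of the resulting order (illustrative): llvm-nightly builders (0)
# come first, then testbranch builders (1), then nightlies pinned to the
# most recent release LLVMs (2), then LLVM main (3), then older LLVMs (4).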
c['prioritizeBuilders'] = prioritize_builders
# GitHub pull request filter
class SafeGitHubEventHandler(GitHubEventHandler):
def handle_push(self, payload, event):
ref = payload['ref']
if re.match(r"^refs/(heads|tags)/(master|main|release/\d+\.x)$", ref):
return super().handle_push(payload, event)
else:
print(f'SafeGitHubEventHandler: ignoring push event for ref: {ref}')
return self.skip()
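# Illustrative refs (not exhaustive): 'refs/heads/main' and
# 'refs/tags/release/16.x' pass the filter above, while PR merge refs
# such as 'refs/pull/1234/merge' are ignored.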
def handle_pull_request(self, payload, event):
pr = payload['pull_request']
try:
# Skip anything with the 'skip_buildbots' label
if any(label['name'] == 'skip_buildbots' for label in pr['labels']):
# print("PR %s was skipped due to skip_buildbots" % str(pr['html_url']))
return self.skip()
# Test anything (even external) that has 'halidebuildbots' as a reviewer.
if any(r['login'] == 'halidebuildbots' for r in pr['requested_reviewers']):
# print("PR %s was handled due halidebuildbots" % str(pr['html_url']))
if payload['action'] == 'review_requested':
# Pretend it's a synchronize event instead since private buildbot code
# rejects review_requested for no apparent reason.
payload['action'] = 'synchronize'
return super().handle_pull_request(payload, event)
# Skip external pull requests that originate from untrusted forks
trusted_repos = (
'halide/Halide', # the primary repository is obviously trusted
'CodeLinaro/Halide', # the Qualcomm open-source staging fork is trustworthy
)
if pr['head']['repo']['full_name'] not in trusted_repos:
# print("PR %s was skipped due to being external:" % str(pr['head']['repo']['full_name']))
return self.skip()
# print("PR %s is being handled normally" % str(pr['html_url']))
return super().handle_pull_request(payload, event)
except KeyError as e:
print(f'SafeGitHubEventHandler: malformed payload: {payload}')
print(f'SafeGitHubEventHandler: missing key "{e}"')
return self.skip()
@staticmethod
def skip():
return [], 'git'
# WEB SERVER
# 'protocols' contains information about the protocols the master will use for communicating with workers.
# You must define at least the 'port' option so that workers can connect to the master with this protocol.
# 'port' must match the value configured into the buildworkers (with their --master option).
c['protocols'] = {'pb': {'port': 9990}}
authz = Authz(
allowRules=[ems.ForceBuildEndpointMatcher(role="admins"),
ems.StopBuildEndpointMatcher(role="admins"),
ems.RebuildBuildEndpointMatcher(role="admins"),
ems.EnableSchedulerEndpointMatcher(role="admins")],
roleMatchers=[RolesFromUsername(roles=["admins"], usernames=["halidenightly"])])
c['www'] = dict(
auth=UserPasswordAuth({'halidenightly': WWW_PASSWORD}),
authz=authz,
port=8012,
change_hook_dialects={
'github': {
'secret': WEBHOOK_SECRET,
'codebase': 'halide',
'skips': [],
'class': SafeGitHubEventHandler,
# 'github_property_whitelist': ['github.base.ref'],
},
},
)
# PROJECT IDENTITY
# the 'title' string will appear at the top of this buildbot
# installation's html.WebStatus home page (linked to the
# 'titleURL') and is embedded in the title of the waterfall HTML page.
c['title'] = 'Halide'
c['titleURL'] = 'http://halide-lang.org'
# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.WebStatus page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
c['buildbotURL'] = 'https://buildbot.halide-lang.org/master/'
# DB URL
c['db'] = {
# This specifies what database buildbot uses to store its state. You can leave
# this at its default for all but the largest installations.
'db_url': 'sqlite:///state.sqlite',
}
# GitHub Integration
# Only testbranch builders need to be considered here
builders = [str(b.name) for b in c['builders'] if b.builder_type.purpose != Purpose.llvm_nightly]
generator = BuildStartEndStatusGenerator(builders=builders,
start_formatter=MessageFormatterRenderable('Build started.'),
end_formatter=MessageFormatterRenderable('Build done.'))
gs = GitHubStatusPush(token=GITHUB_TOKEN,
context=Interpolate("buildbot/%(prop:buildername)s"),
generators=[generator],
verbose=True)
c['services'] = [gs]
# Disable sending usage data
c['buildbotNetUsageData'] = None
# coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
"""Constants."""
UNK_TOKEN = '<unk>'
BOS_TOKEN = '<bos>'
EOS_TOKEN = '<eos>'
PAD_TOKEN = '<pad>'
UNK_IDX = 0 # This should not be changed as long as serialized token
# embeddings redistributed on S3 contain an unknown token.
# Blame this code change and see commit for more context.
LARGE_POSITIVE_FLOAT = 1e18
LARGE_NEGATIVE_FLOAT = -LARGE_POSITIVE_FLOAT
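# A minimal sketch (not part of the original file) of how these sentinels
# are typically used, e.g. masking attention scores before a softmax:
#
#     masked_scores = scores + (1 - mask) * LARGE_NEGATIVE_FLOAT
#
# where 'scores' and 'mask' are hypothetical arrays of the same shape;
# masked positions then get ~zero probability after the softmax.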
GLOVE_NPZ_SHA1 = \
{'glove.42B.300d': ('glove.42B.300d.npz',
'7deee8f4860744db53ed9e50892effe9883e6d89'),
'glove.6B.100d': ('glove.6B.100d.npz',
'01f80f202fcabcc3e0804898349087bfc191dd1c'),
'glove.6B.200d': ('glove.6B.200d.npz',
'5e6e2bdab346c257f88d80d215d518e680d86e32'),
'glove.6B.300d': ('glove.6B.300d.npz',
'1db264aa936be62f055dfb72854204450bdf4399'),
'glove.6B.50d': ('glove.6B.50d.npz',
'aa16be8d184399d2199f83fd62586f2c30497bfa'),
'glove.840B.300d': ('glove.840B.300d.npz',
'b4ba390c1154736e07c0e67d9180935f5930e83c'),
'glove.twitter.27B.100d': ('glove.twitter.27B.100d.npz',
'0f7b82c223451d0002f79ba23596983cdbe0e2b1'),
'glove.twitter.27B.200d': ('glove.twitter.27B.200d.npz',
'41cc2d26f58a54622ce96bf6c8434360ab524f20'),
'glove.twitter.27B.25d': ('glove.twitter.27B.25d.npz',
'9f563d2f296995598cc46812b2fda05ad4c3c879'),
'glove.twitter.27B.50d': ('glove.twitter.27B.50d.npz',
'ce9959c056f2a0a780c468feeb4f823af51630e9')}
FAST_TEXT_NPZ_SHA1 = \
{'crawl-300d-2M': ('crawl-300d-2M.npz',
'9dd611a1fe280c63050cd546d3595400fc0eede4'),
'wiki.aa': ('wiki.aa.npz',
'48f163b80eb37f1806142169d3d4c05cf75b7339'),
'wiki.ab': ('wiki.ab.npz',
'860ceff119dd27e5b701b605879037c1310cbc3e'),
'wiki.ace': ('wiki.ace.npz',
'62938287464040491719f56a6f521f8f808beee8'),
'wiki.ady': ('wiki.ady.npz',
'646843afa260d018ed711df3f1ca9c3e000447b6'),
'wiki.af': ('wiki.af.npz',
'7b14cd27690b67fea318d0bac2283c16430680e2'),
'wiki.ak': ('wiki.ak.npz',
'20f309adad1c45958c97b6055d5838e05bbaea72'),
'wiki.als': ('wiki.als.npz',
'a8b03aa133c4f7da12fc27c2b167b7918b1e9805'),
'wiki.am': ('wiki.am.npz',
'ed3dd10cea64737f7a1623612ee099df9dc19f66'),
'wiki.ang': ('wiki.ang.npz',
'8efe64706d9d6b8eae38b2c7ff0b277e20592bc7'),
'wiki.an': ('wiki.an.npz',
'168046283c719ab96a29b1abae2e25a6575c7be8'),
'wiki.arc': ('wiki.arc.npz',
'049021b7decea4bc009b12936e56b4dbf5b760e7'),
'wiki.ar': ('wiki.ar.npz',
'7e325e1e98dfcdc9368d2ebe40ee834a2ed44912'),
'wiki.arz': ('wiki.arz.npz',
'7d851c2c7be3ee6f7fd896de7b76ea08e3fb08b0'),
'wiki.as': ('wiki.as.npz',
'01d38c29cd4bd99c1a8534abc058822da14a5b9c'),
'wiki.ast': ('wiki.ast.npz',
'9c9846ba5084505a0adea89c95c66e04efbf5ce9'),
'wiki.av': ('wiki.av.npz',
'7ef6a920c364638504e673cfde5f7675503fa81e'),
'wiki.ay': ('wiki.ay.npz',
'c1202e110930e3902397f5cb64a8359e013b469f'),
'wiki.azb': ('wiki.azb.npz',
'10351b7ef14ec2cb610d290cb6a3f6987ef5d8b3'),
'wiki.az': ('wiki.az.npz',
'74257c3bcd533a606afae509ea835dc036d61546'),
'wiki.ba': ('wiki.ba.npz',
'4a2857ed694d66864df562b376c2fa12fcb03646'),
'wiki.bar': ('wiki.bar.npz',
'e65c6b7e9ff83798d1eea05d166148837d53e615'),
'wiki.bat_smg': ('wiki.bat_smg.npz',
'6420584ae28ba6c9dd145fea8f096243d457c2d8'),
'wiki.bcl': ('wiki.bcl.npz',
'33606c970ab336b678393e2bdb8af2116d11cf7b'),
'wiki.be': ('wiki.be.npz',
'84487d341e333344cf71bc12c7a205d923762498'),
'wiki.bg': ('wiki.bg.npz',
'56f2a175b1a1d1a9cf9f1cea277cd0b46ffd7f66'),
'wiki.bh': ('wiki.bh.npz',
'07473989853a344a41aaa18f41030dc56d0d01c7'),
'wiki.bi': ('wiki.bi.npz',
'08adfa3c9ef3016d30ef69ea539d217ff67eda09'),
'wiki.bjn': ('wiki.bjn.npz',
'998a551283222931d3a26922308449950bfa3ec7'),
'wiki.bm': ('wiki.bm.npz',
'454ff9fbd4790e4a076d9a2087a51da28aa1332f'),
'wiki.bn': ('wiki.bn.npz',
'1f36f6f39c9a9b33bb8035c9a4dc7e04933604fd'),
'wiki.bo': ('wiki.bo.npz',
'b9fe87318428de0a7790de175b5fec80c5af482d'),
'wiki.bpy': ('wiki.bpy.npz',
'5c7853173d27e2c018c24eca69de8d5f34511b0d'),
'wiki.br': ('wiki.br.npz',
'7aa66a2034fbfaa1d39e637385d48610238797c9'),
'wiki.bs': ('wiki.bs.npz',
'a019a4677677c2e9e4d899326b2b6c15ad6c011a'),
'wiki.bug': ('wiki.bug.npz',
'09ae3477941d7a99d1df494368d7efb0b2c18913'),
'wiki.bxr': ('wiki.bxr.npz',
'b832c691b8ddd95896c052d3d15e1f98d72068d5'),
'wiki.ca': ('wiki.ca.npz',
'391e0d4daad08649251274fa1cc2a5f49c7728b1'),
'wiki.cbk_zam': ('wiki.cbk_zam.npz',
'02e57a763bc9f9eadaba57953383dd12a0a78a37'),
'wiki.cdo': ('wiki.cdo.npz',
'd6e8f422327e8b2273f1f2662d793707ece6695d'),
'wiki.ceb': ('wiki.ceb.npz',
'23bc0bb9aeaa57dff35092766941a866de142aae'),
'wiki.ce': ('wiki.ce.npz',
'182b2a889256119a6d379d501c55c7621e5855db'),
'wiki.ch': ('wiki.ch.npz',
'82dd77512fcb463481f43c9cef3507e2baa90d7b'),
'wiki.cho': ('wiki.cho.npz',
'b0b620fc2442d1a6e2440e71a424861c80175f0c'),
'wiki.chr': ('wiki.chr.npz',
'3d62c6b95c5af46abd6234426ae760cca65d5bd0'),
'wiki.chy': ('wiki.chy.npz',
'34a28a22da79aebc100e3714b825c95c8d5f54a3'),
'wiki.ckb': ('wiki.ckb.npz',
'ad19461e4be583d08b7693ff5b1e9d590ed41add'),
'wiki.co': ('wiki.co.npz',
'fa60d9f0e79f1c7e15f381aef983a0f4f31c05a8'),
'wiki.crh': ('wiki.crh.npz',
'540270ba6edd9d7b2f7efca52b3b407524ac67d1'),
'wiki.cr': ('wiki.cr.npz',
'f06b77465a38ec960d7d5a7554b848c37e945c76'),
'wiki.csb': ('wiki.csb.npz',
'b8b28559cf2541341af98e2aa755856765bdeabf'),
'wiki.cs': ('wiki.cs.npz',
'19881e931fe06abf341450f00c342d364313e232'),
'wiki.cu': ('wiki.cu.npz',
'731e0d00abd53bc2a8eb6cf37f6ab883cff34e15'),
'wiki.cv': ('wiki.cv.npz',
'e60034fcffb7dfef7b236ddba1194c3aa20b7967'),
'wiki.cy': ('wiki.cy.npz',
'5a0fb967b5556f007c0d5065f951a3d3b1c1005a'),
'wiki.da': ('wiki.da.npz',
'd06258014ba2c7450bc2d55edfdf1731433e42e5'),
'wiki.de': ('wiki.de.npz',
'a21694dfd2af63bd7bb00f0b60b28e88bd1153f1'),
'wiki.diq': ('wiki.diq.npz',
'4f6c77a86b39834a7130419967759afd8cc26b84'),
'wiki.dsb': ('wiki.dsb.npz',
'e74f1d346a8db96987bff0c33ee5f886907c380a'),
'wiki.dv': ('wiki.dv.npz',
'5d6fe6f0eec2e7704121d5aba03b4edbb28af873'),
'wiki.dz': ('wiki.dz.npz',
'77c639d36d0355b2de5adead7996eae342b852a6'),
'wiki.ee': ('wiki.ee.npz',
'4b5a76127d57515d3e8a76787cdefde5856b754a'),
'wiki.el': ('wiki.el.npz',
'a00bcb97e7898931196a1c69f7a492e5b6202661'),
'wiki.eml': ('wiki.eml.npz',
'b475d626b3d97e7a68c02827fdc7900599e838c6'),
'wiki.en': ('wiki.en.npz',
'ad5ec6d49db6c6fe76b8e85ff05d34e5d0e1eb6a'),
'wiki.eo': ('wiki.eo.npz',
'18049b0010520d13e676f5a82e8bb90153d99003'),
'wiki.es': ('wiki.es.npz',
'a6d192ba7d82d762f8367e75ca951aad4d11e410'),
'wiki.et': ('wiki.et.npz',
'4beb7025cf88f1aa62d025b187f0cb09aee61858'),
'wiki.eu': ('wiki.eu.npz',
'5e1a8197e35f20a2476798bbb935b4c131289c4f'),
'wiki.ext': ('wiki.ext.npz',
'049b2d1b0a8b102b45907cf487cac30aa294e0a0'),
'wiki.fa': ('wiki.fa.npz',
'81ed274997c87ef87d73d25e166ca06272ce426f'),
'wiki.ff': ('wiki.ff.npz',
'4867dc74cd53ca0b0f769af4fa1ea420406b59bf'),
'wiki.fi': ('wiki.fi.npz',
'6d1291b854045179f8171ac7d62ede7d8ac159a2'),
'wiki.fiu_vro': ('wiki.fiu_vro.npz',
'dd87806d9dc8833fa0e21e35a50815ebdbaa6c8b'),
'wiki.fj': ('wiki.fj.npz',
'cf5c31b0a69276f5dd18ab738ed92444abaeb755'),
'wiki.fo': ('wiki.fo.npz',
'ffc19807d528af000861a94cfb8097bd686e14fc'),
'wiki.fr': ('wiki.fr.npz',
'8f06d5dbe3cf7214354fe9b2f6eca0ef7419f063'),
'wiki.frp': ('wiki.frp.npz',
'c8b200ae592478d3cd0bfaafcd7aa19de8a3bfe5'),
'wiki.frr': ('wiki.frr.npz',
'fa5e5c39ea2a45793c679eacea290a35e37405ea'),
'wiki.fur': ('wiki.fur.npz',
'a61a8940d059f25000e3fe23933e5ed0d37e65d3'),
'wiki.fy': ('wiki.fy.npz',
'46f9f41bdf6f4fb8e27a753290413d745465963b'),
'wiki.gag': ('wiki.gag.npz',
'49fb01230e6803544122d47ab7d3fe694d1444f2'),
'wiki.gan': ('wiki.gan.npz',
'716b7b26acc15975f30caf3c6effa111516fcca5'),
'wiki.ga': ('wiki.ga.npz',
'ea934bc1fdc1acf6caf9ac746c6c499251f1fdee'),
'wiki.gd': ('wiki.gd.npz',
'597017b5a32d933f194595d3656f858e37e70a62'),
'wiki.glk': ('wiki.glk.npz',
'91a5834658bc2d48714e8807ef24efb79567b4b5'),
'wiki.gl': ('wiki.gl.npz',
'2fa8e48d6ae1e9c9d542eb3f2156cf9e359e66c2'),
'wiki.gn': ('wiki.gn.npz',
'e359eef3928e1f1b5d8fcf0ea532e8794c66289a'),
'wiki.gom': ('wiki.gom.npz',
'8cd361481c23f7545cc2bd8f1bf22aa7400edd4d'),
'wiki.got': ('wiki.got.npz',
'd05daf105611150695e61775fdff2c500b36be3f'),
'wiki.gu': ('wiki.gu.npz',
'0ce175c5fc39bab4032892f70c9d2bb850af0f4a'),
'wiki.gv': ('wiki.gv.npz',
'2c573f873d607831ff01b64603c17b8db79bd7e1'),
'wiki.hak': ('wiki.hak.npz',
'e6048727799cdf149f5c50037e0fc59300d33a94'),
'wiki.ha': ('wiki.ha.npz',
'f18ea7286bbd390c5470896b2c99cb1adc740064'),
'wiki.haw': ('wiki.haw.npz',
'18bcd85d2e06b1b889f0835fc5b62697fdf32d72'),
'wiki.he': ('wiki.he.npz',
'76915ff167b6ecb7b7e22ff0ca46914a55d344af'),
'wiki.hif': ('wiki.hif.npz',
'12153aaf98d76d5502ab77a27cd0b9a539f61513'),
'wiki.hi': ('wiki.hi.npz',
'249666a598991f6ec147954c6af9e531fd1cd94e'),
'wiki.ho': ('wiki.ho.npz',
'3f804fd69780c0789708b56ea9d48715f8e38f26'),
'wiki.hr': ('wiki.hr.npz',
'9a3de28e69f97048bfb480b4f83eaab6149f66ad'),
'wiki.hsb': ('wiki.hsb.npz',
'7070bf64e13299dd66ac0e9f8e24011a56b6bfe8'),
'wiki.ht': ('wiki.ht.npz',
'a607093d511afeb584d02dc676bc5a27eff66287'),
'wiki.hu': ('wiki.hu.npz',
'9b2c4750daf1bcf39768572e874b5afda0e2f0bc'),
'wiki.hy': ('wiki.hy.npz',
'ec0461a102a6fb00bd324f66cefd3c8d55a7093a'),
'wiki.hz': ('wiki.hz.npz',
'5dfb8afbdae6b4148c3e55ab459c56a74b46b463'),
'wiki.ia': ('wiki.ia.npz',
'4cfaaf053b9513bbf5b2423258c0f01d20256de6'),
'wiki.id': ('wiki.id.npz',
'bace396bb9941cc9e5b2e5f5a19be6db833c5fd4'),
'wiki.ie': ('wiki.ie.npz',
'1bae7256c2e763ce6d692d1c0a603d99a8b22826'),
'wiki.ig': ('wiki.ig.npz',
'23128e54a5e143891d392d621723bad9cfc8cf7b'),
'wiki.ii': ('wiki.ii.npz',
'54bc16d05da512481865a89ecf30260b0acc04dc'),
'wiki.ik': ('wiki.ik.npz',
'f8015227e893d2375699b7d132b306ba381f02ac'),
'wiki.ilo': ('wiki.ilo.npz',
'185a11f81bd5d24a34558dda81ee4735f5ba150b'),
'wiki.io': ('wiki.io.npz',
'ddf8180a90aa6ee5be93a2582cc99c535f21363e'),
'wiki.is': ('wiki.is.npz',
'968f8dd2a093b279a6f7aaa734008454bf51d724'),
'wiki.it': ('wiki.it.npz',
'fdfb857a309b2c3d29482bb5cc55f21b858d2e6f'),
'wiki.iu': ('wiki.iu.npz',
'fa8896730bd6c24c3473daa22116d1016294e7f7'),
'wiki.jam': ('wiki.jam.npz',
'a8f0d0b99c89ace0a6401b8fcda261d06065faaf'),
'wiki.ja': ('wiki.ja.npz',
'8d42e5a40e4d1d8645b2d80b873a65cadcf68b5c'),
'wiki.jbo': ('wiki.jbo.npz',
'145fc999ab004b348cf9bf445f0a93a7a145308b'),
'wiki.jv': ('wiki.jv.npz',
'66978770bf06e42414395cf5fd8c596044d72bec'),
'wiki.kaa': ('wiki.kaa.npz',
'624a640ecb9901b2aba2e9f44ab615146ecb2862'),
'wiki.kab': ('wiki.kab.npz',
'e97f93b6ba65e95c85b7541932cf53c5ad9eb896'),
'wiki.ka': ('wiki.ka.npz',
'1ca8376e1e0cbd58001c1b51a2d488a2874a6743'),
'wiki.kbd': ('wiki.kbd.npz',
'f2d2a05b06723ac549784ad5470d84f5742a1352'),
'wiki.kg': ('wiki.kg.npz',
'fa7f6d5f660a173a3e75342d449980eedcdc789e'),
'wiki.ki': ('wiki.ki.npz',
'21a8c7c616c0050c51c288861f3423f313e4f634'),
'wiki.kj': ('wiki.kj.npz',
'f3c347509a0d81f4f7fdbb8b22889b8d76e5014e'),
'wiki.kk': ('wiki.kk.npz',
'bc24a3289e1c1e18e16b6789c2f9f92af1e73071'),
'wiki.kl': ('wiki.kl.npz',
'b8b7e7359f067836e2be2ecfe9f35a820b00fe1d'),
'wiki.km': ('wiki.km.npz',
'e053799fd01463808432dc035bef3e36620e2f36'),
'wiki.kn': ('wiki.kn.npz',
'2849a0a8b3453e9bf6af05d4c7bd3db881dd1068'),
'wiki.koi': ('wiki.koi.npz',
'a9b02e9bd41833bcd54769f94626019c03f29997'),
'wiki.ko': ('wiki.ko.npz',
'764d9896e74b5a26c6884d48bce3bed8ed3a7822'),
'wiki.krc': ('wiki.krc.npz',
'bfe39598c718f1cc95909db7544b3214b308a97c'),
'wiki.kr': ('wiki.kr.npz',
'1e6af853d4a8ea7830e116eb9b61ac5d7d9a315c'),
'wiki.ksh': ('wiki.ksh.npz',
'66cd0e3e0a0b0282a13960571ebe7cddd7706bf2'),
'wiki.ks': ('wiki.ks.npz',
'85f1adaa05b854df4dede745a1aaab3836e60770'),
'wiki.ku': ('wiki.ku.npz',
'faf90584e5a45e6d0f9eeb88399b82abe037d584'),
'wiki.kv': ('wiki.kv.npz',
'9f2b41822013a412da9c99fac06eed8be03ca192'),
'wiki.kw': ('wiki.kw.npz',
'3eed8a8fc97a2fc79241b8474a458c98d00fc897'),
'wiki.ky': ('wiki.ky.npz',
'0116ff90f10a6c0728e1ea86d8a44896ea83270a'),
'wiki.lad': ('wiki.lad.npz',
'5af2015b3d1c5e8563f0e92721580988ebe2ce50'),
'wiki.la': ('wiki.la.npz',
'7143303a3ea13c7668eb90ea6e3d2ca69857a3be'),
'wiki.lbe': ('wiki.lbe.npz',
'f206a3c35a184ba5d2b32ee68640eadf66c847da'),
'wiki.lb': ('wiki.lb.npz',
'143dc6337f3690379282034c460c613d7f144923'),
'wiki.lez': ('wiki.lez.npz',
'b29a680decc6b29f24e8eb9e4f8e11e3419d45f1'),
'wiki.lg': ('wiki.lg.npz',
'866640ce62cedbc1d453b7ea3c289c291ad76e13'),
'wiki.lij': ('wiki.lij.npz',
'0dcd3d7009ae89b1016ca6cdb99a9f0d70bc4baf'),
'wiki.li': ('wiki.li.npz',
'4666b3c238256d7b7623a136db19b8b9f4754734'),
'wiki.lmo': ('wiki.lmo.npz',
'ac89fa7cfe0675950bcb31c66bf3f88a3cfc98f0'),
'wiki.ln': ('wiki.ln.npz',
'fba158719944aabe58e0002a90be0ed77e11702d'),
'wiki.lo': ('wiki.lo.npz',
'1e113e340a8a93d385e14502c9c4e3bcdf6c3101'),
'wiki.lrc': ('wiki.lrc.npz',
'42cb755f398fba6f0da7949c91e92b55654bd482'),
'wiki.ltg': ('wiki.ltg.npz',
'182f75859e228d1162215f28fe7f2dca127624a4'),
'wiki.lt': ('wiki.lt.npz',
'66aa944bd2e777cb82d6d59b1f2f837b6c48cb37'),
'wiki.lv': ('wiki.lv.npz',
'2be8f926da85694fa998bf79d80b61ebb8d67576'),
'wiki.mai': ('wiki.mai.npz',
'b8a9c36e2a0f1bb84a44dc762250d2a9007ef637'),
'wiki.map_bms': ('wiki.map_bms.npz',
'6f0394d6b3d08a946e3df4b9355efe94148f018a'),
'wiki.mdf': ('wiki.mdf.npz',
'774ee35334641db57f9ac9069961c5372a5d92e8'),
'wiki.mg': ('wiki.mg.npz',
'496c48ef668f08ce95ebb11ce1ce5026b52d935c'),
'wiki.mh': ('wiki.mh.npz',
'352edd84f99c5aa277a7306f6cacea1fab065ed3'),
'wiki.mhr': ('wiki.mhr.npz',
'dd78b27a674ac10411cdf74ac32f9391506b17e0'),
'wiki.min': ('wiki.min.npz',
'628b406441ab03bc8aa68195ada50bfdc8226f34'),
'wiki.mi': ('wiki.mi.npz',
'754127b473861cd4f9ae034c9f527a34827b1f00'),
'wiki.mk': ('wiki.mk.npz',
'b09fed4f56c296f13c4020ef1fec498382a38b73'),
'wiki.ml': ('wiki.ml.npz',
'02fb55d97ca2f0408f0e7e8dd6a661bbc3319a2a'),
'wiki.mn': ('wiki.mn.npz',
'08b2c45689aa5d9ec49df96dc7c777ce9b9a0b4b'),
'wiki.mo': ('wiki.mo.npz',
'638c2e8bd2352fd52921b9ae62f578b8357bab49'),
'wiki.mrj': ('wiki.mrj.npz',
'ec5cf1f4fb8dfdca64d8172974e620eb8fa41626'),
'wiki.mr': ('wiki.mr.npz',
'074dd68c947c2f137a3e84b55012925f00213139'),
'wiki.ms': ('wiki.ms.npz',
'3dbe9e9d70251de8a374776ff1250a9c3103ee59'),
'wiki.mt': ('wiki.mt.npz',
'f5103998a68d1b178387417436a83123d44aba01'),
'wiki.multi.ar': ('wiki.multi.ar.npz',
'a010d1d81a465c56ebaf596b3e8e8795e7f0f8e3'),
'wiki.multi.bg': ('wiki.multi.bg.npz',
'c04018f3a600cee170f12a36cdd35b4727a2aade'),
'wiki.multi.ca': ('wiki.multi.ca.npz',
'eef52a0cf20c133ca9065de25f0702861a8cfa29'),
'wiki.multi.cs': ('wiki.multi.cs.npz',
'c5f547aa78c0e3d7dae67a0334d500bf2a86aa30'),
'wiki.multi.da': ('wiki.multi.da.npz',
'24374f2ee169b33327feeee46da31b0de1622fe4'),
'wiki.multi.de': ('wiki.multi.de.npz',
'2e6c119b345bebd34b56eaaf855d6703889b11f7'),
'wiki.multi.el': ('wiki.multi.el.npz',
'9d122beedb80a2e5334946641e5bafd32c01e76b'),
'wiki.multi.en': ('wiki.multi.en.npz',
'8c3c480b4cb2690304173713a646280613b244a8'),
'wiki.multi.es': ('wiki.multi.es.npz',
'483a22656e4fb2a01e9f4ef8156b261e780850ab'),
'wiki.multi.et': ('wiki.multi.et.npz',
'22498c7b91645a3874fa738b5cfb16bf98b6f97c'),
'wiki.multi.fi': ('wiki.multi.fi.npz',
'765a6f0b63777bff4ae6ca2b461c5889c03d6a70'),
'wiki.multi.fr': ('wiki.multi.fr.npz',
'decd9aacf600114b8a36072535c0309874a37c83'),
'wiki.multi.he': ('wiki.multi.he.npz',
'7eee940c1b85936f59122f4b1a166223dd946674'),
'wiki.multi.hr': ('wiki.multi.hr.npz',
'1673963416af088f8bf15576afb33d58115db35c'),
'wiki.multi.hu': ('wiki.multi.hu.npz',
'a1fbe6ededf3cbaa3eaa22dd8b20cce4b36cfc6d'),
'wiki.multi.id': ('wiki.multi.id.npz',
'6c3e721febb511ede7db7bf978d65769e4270f5c'),
'wiki.multi.it': ('wiki.multi.it.npz',
'fc5bfc11e0165e8d95c1708573dad5e456826c73'),
'wiki.multi.mk': ('wiki.multi.mk.npz',
'6cd50198355674f156fc863108d9bebf11cfabd9'),
'wiki.multi.nl': ('wiki.multi.nl.npz',
'4fa06b9230c95dfa5a9e9a5d80f1f5ba614d3cbf'),
'wiki.multi.no': ('wiki.multi.no.npz',
'63756168c1101e73fba8d1a5015f32b8892819e6'),
'wiki.multi.pl': ('wiki.multi.pl.npz',
'958b8e8bead965ba1bb1433e1c960fc3e12a10fb'),
'wiki.multi.pt': ('wiki.multi.pt.npz',
'22f07df1609d79b95344ee575ea43141424a1528'),
'wiki.multi.ro': ('wiki.multi.ro.npz',
'73180b3e382519004bf38ea7b86237aacbbe813a'),
'wiki.multi.ru': ('wiki.multi.ru.npz',
'3b2eb9163f35e90bf2ce1cd3c997b354d0c34f59'),
'wiki.multi.sk': ('wiki.multi.sk.npz',
'606a0c3ba9849070c6b6b8c22d920fdeed9a1385'),
'wiki.multi.sl': ('wiki.multi.sl.npz',
'3cfdab5043b8cfe1535cb6dbd4c9e68847ad5904'),
'wiki.multi.sv': ('wiki.multi.sv.npz',
'4f1494885b9a831e87cfa3c15f2204c4a73c0779'),
'wiki.multi.tr': ('wiki.multi.tr.npz',
'54f90d5ddb9a65538a41e37c5a67ed933a5e4885'),
'wiki.multi.uk': ('wiki.multi.uk.npz',
'500fd26b1d7a25b42458012e99f9f76642e0c787'),
'wiki.multi.vi': ('wiki.multi.vi.npz',
'3955809cceb300965c15f9372221417719bb0db8'),
'wiki.mus': ('wiki.mus.npz',
'a5f48934a3fa6eaf4929098046c93fc94dd6bcb6'),
'wiki.mwl': ('wiki.mwl.npz',
'8a5e2c272166f8a72c5694ca6c3104d5f49179ec'),
'wiki.my': ('wiki.my.npz',
'5e035aca16700d7d6695af8a6d3a88ac847aaeb7'),
'wiki.myv': ('wiki.myv.npz',
'd4cfaab70c640033e02c0fc0c5a3615ae836c569'),
'wiki.mzn': ('wiki.mzn.npz',
'ad09ac584ae455b5862b95125ef409360ae18445'),
'wiki.nah': ('wiki.nah.npz',
'2dc454ef37d059f2053af46cfa1f4f0ca939cba0'),
'wiki.na': ('wiki.na.npz',
'401f0f880eb7aa78d21348bc1e0a3953b3e81bf0'),
'wiki.nap': ('wiki.nap.npz',
'996da46aeeab5644ba766d00c5e343b1553361d7'),
'wiki.nds_nl': ('wiki.nds_nl.npz',
'5a9307e16b13a5a82ec19a52b33254537e7198e7'),
'wiki.nds': ('wiki.nds.npz',
'b249a87c78c52becf51e7b50aaf9f9b6a36585f1'),
'wiki.ne': ('wiki.ne.npz',
'a601db2647a74ffd2b4b43dcb8584735f555459c'),
'wiki.new': ('wiki.new.npz',
'c398a3775aba9c68ce765cfdfb6b188f7c47e4c6'),
'wiki-news-300d-1M': ('wiki-news-300d-1M.npz',
'0a03bbd508e5381e140476140fb121afeb0050ed'),
'wiki-news-300d-1M-subword': ('wiki-news-300d-1M-subword.npz',
'69edae21375407781c727dcb9e534e79d712d137'),
'wiki.ng': ('wiki.ng.npz',
'befd774d15f69d43547e13e5ea3a97c4cb1ab405'),
'wiki.nl': ('wiki.nl.npz',
'5a7cb6f1dd0a7621202abba9461ac2c5bf905219'),
'wiki.nn': ('wiki.nn.npz',
'8e5059ddeb24050fadaa5cc4622b13feb3e4a226'),
'wiki.no': ('wiki.no.npz',
'5ce6e0f793e66f081652f64013968099de03d9f9'),
'wiki.nov': ('wiki.nov.npz',
'95ed23b4cfd7a65afa1c12c7dbdce6af53923d77'),
'wiki.vec': ('wiki.vec.npz',
'08ebb912efeb9df1c7d05e1af90484d210dff47e'),
'wiki.nrm': ('wiki.nrm.npz',
'e58614b4508ff9810f0b58fd818f973775bc918d'),
'wiki.nso': ('wiki.nso.npz',
'56a2ebe260241402d117cd89c5c872b9c96ff05b'),
'wiki.nv': ('wiki.nv.npz',
'c713051fe03ec1f60314bb42161b2a47fb5e169a'),
'wiki.ny': ('wiki.ny.npz',
'ba5a1725955cbc13e7fd93ab499f8085840c992c'),
'wiki.oc': ('wiki.oc.npz',
'259e7d994c38a4cfc140fb07016b82d6781e5027'),
'wiki.olo': ('wiki.olo.npz',
'0fea70f887def4779ee70a79366b88f1ada65004'),
'wiki.om': ('wiki.om.npz',
'47e2d756b5f8913085d901375c1b4e0b118a4221'),
'wiki.or': ('wiki.or.npz',
'7e274ab060219b019aa02bb97941cc6e162fd01f'),
'wiki.os': ('wiki.os.npz',
'19e8199cc2aaffdb07b6c558dbc5465ac6e03155'),
'wiki.pag': ('wiki.pag.npz',
'eddf4931547649026c02f893297ef673ec6158bb'),
'wiki.pam': ('wiki.pam.npz',
'40109aa174bd9f0fa657839bb548e2b0646c58d3'),
'wiki.pa': ('wiki.pa.npz',
'8a5870717e9e641b1f757f13259171698118de2e'),
'wiki.pap': ('wiki.pap.npz',
'999c8e5b005ca20d9998fbbe4fa79177f69e24c0'),
'wiki.pcd': ('wiki.pcd.npz',
'e975066b323a65cdc5e4c27138ef674d2cf7250b'),
'wiki.pdc': ('wiki.pdc.npz',
'5c770b9d56f276b0aa535845f175c05ee1cea615'),
'wiki.pfl': ('wiki.pfl.npz',
'0063d0b633ee529a75482b36ed4f4da7d64994ec'),
'wiki.pih': ('wiki.pih.npz',
'ce1d76c94d248545eea0d7436c54849dbb380bfc'),
'wiki.pi': ('wiki.pi.npz',
'c7d56c334bf529f8b3655693d207a80feaec4aed'),
'wiki.pl': ('wiki.pl.npz',
'0d612fdf871a1a4084c867f394940475be899443'),
'wiki.pms': ('wiki.pms.npz',
'ca149a2fb138011315bb6d5d61c7a5647e515e51'),
'wiki.pnb': ('wiki.pnb.npz',
'9ec82d02ad8894056c67991cf8ce927bcca74ee2'),
'wiki.pnt': ('wiki.pnt.npz',
'3f90123407bb8fc838a0a0d3700a14e15f5b26aa'),
'wiki.ps': ('wiki.ps.npz',
'7edebc02ac16f5fab83eb10b7d0fab821a9a4d43'),
'wiki.pt': ('wiki.pt.npz',
'f172fd801edd1ad9d319ba44146d40b5d682a473'),
'wiki.qu': ('wiki.qu.npz',
'68bec60ccfe1826c3b3a8968574488dbc74cdf7b'),
'wiki.rm': ('wiki.rm.npz',
'00fb191fc736ba60cb23e76169dfccde9a9daad0'),
'wiki.rmy': ('wiki.rmy.npz',
'c5e93cc37ff7293b9a1d9fe55c42d6fbde372b97'),
'wiki.rn': ('wiki.rn.npz',
'57b8e0d6999269be227af6ef2797a9cf8386ff1b'),
'wiki.roa_rup': ('wiki.roa_rup.npz',
'e06d6b5672a59bb9e83143bc8b28300d23c09546'),
'wiki.roa_tara': ('wiki.roa_tara.npz',
'c083105f40236dc3711f06c1b40e8ee7a714b99d'),
'wiki.ro': ('wiki.ro.npz',
'766bc0cb58a65b0b1763b9a0d90e91ab982eb20d'),
'wiki.rue': ('wiki.rue.npz',
'9a91fa093cd48d7d658d526b0ccda48dc59cd7f4'),
'wiki.ru': ('wiki.ru.npz',
'd59d099481c22d5592ab9635c9ee48060aa0bf45'),
'wiki.rw': ('wiki.rw.npz',
'e99ee87d249f6c157c5c97397d1025d798b85c69'),
'wiki.sah': ('wiki.sah.npz',
'85dae39097b29bc8e2b64f343a77794e4a62f91a'),
'wiki.sa': ('wiki.sa.npz',
'7d1928d7c67400045ac1b35a37a0e3089690d875'),
'wiki.scn': ('wiki.scn.npz',
'27d7b8050bbeed8ce196061c610216760b053c39'),
'wiki.sc': ('wiki.sc.npz',
'69c7b8be0f03a1bbd615695f93bdd78f96a58e16'),
'wiki.sco': ('wiki.sco.npz',
'4880282f59d3338b67fbff75359e2d24896e95bb'),
'wiki.sd': ('wiki.sd.npz',
'0ed8da4d27223db717a612cf0c88582351db6e19'),
'wiki.se': ('wiki.se.npz',
'0f4b2e060d5e29f96ca73aab29c967e79db69c17'),
'wiki.sg': ('wiki.sg.npz',
'a5e4edf34fe1a88b322da4c3922ec5a470e200c6'),
'wiki.sh': ('wiki.sh.npz',
'c13f1e94676bc939560193f7aa7ffd7d604707b3'),
'wiki.simple': ('wiki.simple.npz',
'352d0575e7d60b08e1dfce2c5de713906f0ed78f'),
'wiki.si': ('wiki.si.npz',
'204f9ffbe7770a9f56d3b2fb26999165015f5c33'),
'wiki.sk': ('wiki.sk.npz',
'7a9820b5a343b242660bf2595d1ecbf6e00a76d6'),
'wiki.sl': ('wiki.sl.npz',
'85f3186f26d6725317a64e290363a7251b928b81'),
'wiki.sm': ('wiki.sm.npz',
'9e13452cc4bff677f4f15db04f9d2f95f6ec054c'),
'wiki.sn': ('wiki.sn.npz',
'e8d5f7dcf51280c5f99bc3df849b4889a61e9fcd'),
'wiki.so': ('wiki.so.npz',
'0f5d71b95768b33fd939a870c15344c4478364a9'),
'wiki.sq': ('wiki.sq.npz',
'8b05826df8575e65c87a2fc0b7630cf644d4216d'),
'wiki.srn': ('wiki.srn.npz',
'2711396ef297ac5dde8904508bc002bdecbcc6f4'),
'wiki.sr': ('wiki.sr.npz',
'546edc8e29a5d2e99ed10eb4a552cbef2bb8f417'),
'wiki.ss': ('wiki.ss.npz',
'2e5911bad79bb5270a64f587e326d31c95ec58f3'),
'wiki.st': ('wiki.st.npz',
'23bc954719a2962e891f02efaea754c9ea025894'),
'wiki.stq': ('wiki.stq.npz',
'dd3ece0c0aa30e53ae0f4b558309bb60ab628652'),
'wiki.su': ('wiki.su.npz',
'7e48732e8a1fcf212e692924a4416a6ac3b3b055'),
'wiki.sv': ('wiki.sv.npz',
'b9ec52e9423688f195f3145c243226c0e0b51e83'),
'wiki.sw': ('wiki.sw.npz',
'5262f0c645322b10eca73f792a970f10b2719e55'),
'wiki.szl': ('wiki.szl.npz',
'fdd6d6b291cdbbcec5ff93451a588fdd103bb2d0'),
'wiki.ta': ('wiki.ta.npz',
'da7c5bc6e1142306ff2669bf1739832beb6c1763'),
'wiki.tcy': ('wiki.tcy.npz',
'baa49e1afa2bb0dcaaef0fac1ee75bbe711d1134'),
'wiki.te': ('wiki.te.npz',
'baf48767ce85e4d41d65d25f2bbf1c5f559ec18f'),
'wiki.tet': ('wiki.tet.npz',
'11e46a893af55344dbe102d530fdfea5d949d3bc'),
'wiki.tg': ('wiki.tg.npz',
'da66abb72ec9ccc602713161e544963d59cc51d7'),
'wiki.th': ('wiki.th.npz',
'25e54bf2d305779ec9baa5f344410bd75c7702fc'),
'wiki.ti': ('wiki.ti.npz',
'1faf98f3a0eafa7559a4b2a111f43dd1f7b9a05b'),
'wiki.tk': ('wiki.tk.npz',
'34c714fa8275fd6abfe86b2d144a043774552a6c'),
'wiki.tl': ('wiki.tl.npz',
'7d7f8a0485155bce7a74a1d778824375b0029f53'),
'wiki.tn': ('wiki.tn.npz',
'd0bc3a9b948753ac2283e5e10480c9fa0f6acb53'),
'wiki.to': ('wiki.to.npz',
'e982fc31bcfcf7339988d7aad21ce29ac9e84b0b'),
'wiki.tpi': ('wiki.tpi.npz',
'448cef043fa4b7f97825dbf8ee205ef05543bcac'),
'wiki.tr': ('wiki.tr.npz',
'c9830607a4c5134c6191006f1d80bae0ec798fe6'),
'wiki.ts': ('wiki.ts.npz',
'84a0598803712c8a713943447ddb73fc0f39af43'),
'wiki.tt': ('wiki.tt.npz',
'82c29df18f33e6284af3e977a6dda7e132a7a225'),
'wiki.tum': ('wiki.tum.npz',
'358990b894a3fb09d70674465952d828c9b0eda7'),
'wiki.tw': ('wiki.tw.npz',
'1e6d2838a4f271c1808795fb929cfcbf95094d93'),
'wiki.ty': ('wiki.ty.npz',
'e41ca5192d8cb515b3561c8d6935b150deb027b7'),
'wiki.tyv': ('wiki.tyv.npz',
'ce062ed32e854604714b65698ae290c99ba28060'),
'wiki.udm': ('wiki.udm.npz',
'9e1c5891ee0c5ac8f65fc457e1b42c7b2bfc8d37'),
'wiki.ug': ('wiki.ug.npz',
'656503e54063e200980e39f00fc011395bcd8551'),
'wiki.uk': ('wiki.uk.npz',
'352b7ee24d9fc6513fff4fe13bc04086c680834a'),
'wiki.ur': ('wiki.ur.npz',
'a81e55c7adfc2cef779ce9a01fe21319a7e4943b'),
'wiki.uz': ('wiki.uz.npz',
'd60d1e67bb8574dd71c18c88114aba674fc1eecb'),
'wiki.ve': ('wiki.ve.npz',
'5bfc3dbb3e47d23597df47ef12bd1c64ab8d3ea9'),
'wiki.vep': ('wiki.vep.npz',
'7a94355754fbe56802242c0bf9d7a27335095552'),
'wiki.vi': ('wiki.vi.npz',
'f118039eb16a4ca3347b6b171eac41113350a041'),
'wiki.vls': ('wiki.vls.npz',
'9a46a2fdc6448aa54f212081643745499ea7d05c'),
'wiki.vo': ('wiki.vo.npz',
'8e2f93c85ac608bcc4ae14093b9ff016061378fb'),
'wiki.wa': ('wiki.wa.npz',
'907074f7743d30cdbb2c48d0c8b4040796ea4164'),
'wiki.war': ('wiki.war.npz',
'928fb410c394b9c18d875326b6a3e750e2611e1b'),
'wiki.wo': ('wiki.wo.npz',
'7bb352be44f7261aa926f49b13e77df30f29312f'),
'wiki.wuu': ('wiki.wuu.npz',
'0d1dc7b05867ff2156a1180ad3da3b4697924e59'),
'wiki.xal': ('wiki.xal.npz',
'd87f4a131e086dc0bdc2a7e10406820c3c03b6a9'),
'wiki.xh': ('wiki.xh.npz',
'c64e1d2e77d1c744a628e2bd7353284616e48bea'),
'wiki.xmf': ('wiki.xmf.npz',
'160b9ee9773b9099aaf37ae9bdbc8a4a93b7f6ea'),
'wiki.yi': ('wiki.yi.npz',
'0662542cee29f3392fc905004ac6443b32c1477c'),
'wiki.yo': ('wiki.yo.npz',
'5d12d3b902a1fa19d8548295c3802c0608afa5c8'),
'wiki.za': ('wiki.za.npz',
'536348ff89df62e968739b567a1245bfd4112fbe'),
'wiki.zea': ('wiki.zea.npz',
'61fa192289a7c0f73ffa8035632a38b91c31c224'),
'wiki.zh_classical': ('wiki.zh_classical.npz',
'9acc9eaf8ebe316b945fb1f56ac71a2b7e024854'),
'wiki.zh_min_nan': ('wiki.zh_min_nan.npz',
'5d38bc025c82af578299d60f7df7b399de6ed81a'),
'wiki.zh': ('wiki.zh.npz',
'94007fcf3b105bf2c21b84a3a22bdb7946e74804'),
'wiki.zh_yue': ('wiki.zh_yue.npz',
'af6f0d94e6418d528d6cedd859e07e6e2fb416ab'),
'wiki.zu': ('wiki.zu.npz',
'fc9ce07d5d0c49a3c86cf1b26056ada58f9404ca')}
GOOGLEANALOGY_CATEGORIES = [
'capital-common-countries', 'capital-world', 'currency', 'city-in-state',
'family', 'gram1-adjective-to-adverb', 'gram2-opposite',
'gram3-comparative', 'gram4-superlative', 'gram5-present-participle',
'gram6-nationality-adjective', 'gram7-past-tense', 'gram8-plural',
'gram9-plural-verbs'
]
BATS_CHECKSUMS = \
{'BATS_3.0/1_Inflectional_morphology/I01 [noun - plural_reg].txt':
'cfcba2835edf81abf11b84defd2f4daa3ca0b0bf',
'BATS_3.0/1_Inflectional_morphology/I02 [noun - plural_irreg].txt':
'44dbc56432b79ff5ce2ef80b6840a8aa916524f9',
'BATS_3.0/1_Inflectional_morphology/I03 [adj - comparative].txt':
'dc530918e98b467b8102a7dab772a66d3db32a73',
'BATS_3.0/1_Inflectional_morphology/I04 [adj - superlative].txt':
'6c6fdfb6c733bc9b298d95013765163f42faf6fb',
'BATS_3.0/1_Inflectional_morphology/I05 [verb_inf - 3pSg].txt':
'39fa47ec7238ddb3f9818bc586f23f55b55418d8',
'BATS_3.0/1_Inflectional_morphology/I06 [verb_inf - Ving].txt':
'8fabeb9f5af6c3e7154a220b7034bbe5b900c36f',
'BATS_3.0/1_Inflectional_morphology/I07 [verb_inf - Ved].txt':
'aa04df95aa2edb436cbcc03c7b15bc492ece52d6',
'BATS_3.0/1_Inflectional_morphology/I08 [verb_Ving - 3pSg].txt':
'5f22d8121a5043ce76d3b6b53a49a7bb3fe33920',
'BATS_3.0/1_Inflectional_morphology/I09 [verb_Ving - Ved].txt':
'377777c1e793c638e72c010228156d01f916708e',
'BATS_3.0/1_Inflectional_morphology/I10 [verb_3pSg - Ved].txt':
'051c0c3c633e10900f827991dac14cf76da7f022',
'BATS_3.0/2_Derivational_morphology/D01 [noun+less_reg].txt':
'5d6839e9d34ee1e9fddb5bbf6516cf6420b85d8d',
'BATS_3.0/2_Derivational_morphology/D02 [un+adj_reg].txt':
'80b82227a0d5f7377f1e8cebe28c582bfeb1afb5',
'BATS_3.0/2_Derivational_morphology/D03 [adj+ly_reg].txt':
'223e120bd61b3116298a253f392654c15ad5a39a',
'BATS_3.0/2_Derivational_morphology/D04 [over+adj_reg].txt':
'a56f8685af489bcd09c36f864eba1657ce0a7c28',
'BATS_3.0/2_Derivational_morphology/D05 [adj+ness_reg].txt':
'5da99b1f1781ecfb4a1a7448c715abf07451917b',
'BATS_3.0/2_Derivational_morphology/D06 [re+verb_reg].txt':
'4c5e1796091fade503fbf0bfc2fae2c7f98b5dd2',
'BATS_3.0/2_Derivational_morphology/D07 [verb+able_reg].txt':
'a6218162bc257d98e875fc667c23edfac59e19fd',
'BATS_3.0/2_Derivational_morphology/D08 [verb+er_irreg].txt':
'9a4236c3bbc23903e101a42fb5ad6e15e552fadf',
'BATS_3.0/2_Derivational_morphology/D09 [verb+tion_irreg].txt':
'3ab0153926d5cf890cf08a4077da6d9946133874',
'BATS_3.0/2_Derivational_morphology/D10 [verb+ment_irreg].txt':
'2a012b87a9a60e128e064c5fe24b60f99e16ddce',
'BATS_3.0/3_Encyclopedic_semantics/E01 [country - capital].txt':
'9890315d3c4e6a38b8ae5fc441858564be3d3dc4',
'BATS_3.0/3_Encyclopedic_semantics/E02 [country - language].txt':
'ef08a00e8ff7802811ace8f00fabac41b5d03678',
'BATS_3.0/3_Encyclopedic_semantics/E03 [UK_city - county].txt':
'754957101c93a25b438785bd4458404cd9010259',
'BATS_3.0/3_Encyclopedic_semantics/E04 [name - nationality].txt':
'71a6562c34fb6154992a7c3e499375fcc3529c96',
'BATS_3.0/3_Encyclopedic_semantics/E05 [name - occupation].txt':
'a9a6f9f1af959aef83106f3dbd6bed16dfe9a3ea',
'BATS_3.0/3_Encyclopedic_semantics/E06 [animal - young].txt':
'12d5b51c7b76b9136eadc719abc8cf4806c67b73',
'BATS_3.0/3_Encyclopedic_semantics/E07 [animal - sound].txt':
'91991b007a35f45bd42bd7d0d465c6f8311df911',
'BATS_3.0/3_Encyclopedic_semantics/E08 [animal - shelter].txt':
'e5af11e216db392986ba0cbb597d861066c29adb',
'BATS_3.0/3_Encyclopedic_semantics/E09 [things - color].txt':
'd30b2eb2fc7a60f19afda7c54582e30f6fe28f51',
'BATS_3.0/3_Encyclopedic_semantics/E10 [male - female].txt':
'247a588671bc1da8f615e14076bd42573d24b4b3',
'BATS_3.0/4_Lexicographic_semantics/L01 [hypernyms - animals].txt':
'4b5c4dabe2c9c038fafee85d8d3958f1b1dec987',
'BATS_3.0/4_Lexicographic_semantics/L02 [hypernyms - misc].txt':
'83d5ecad78d9de28fd70347731c7ee5918ba43c9',
'BATS_3.0/4_Lexicographic_semantics/L03 [hyponyms - misc].txt':
'a8319856ae2f76b4d4c030ac7e899bb3a06a9a48',
'BATS_3.0/4_Lexicographic_semantics/L04 [meronyms - substance].txt':
'c081e1104e1b40725063f4b39d13d1ec12496bfd',
'BATS_3.0/4_Lexicographic_semantics/L05 [meronyms - member].txt':
'bcbf05f3be76cef990a74674a9999a0bb9790a07',
'BATS_3.0/4_Lexicographic_semantics/L06 [meronyms - part].txt':
'2f9bdcc74b881e1c54b391c9a6e7ea6243b3accc',
'BATS_3.0/4_Lexicographic_semantics/L07 [synonyms - intensity].txt':
'8fa287860b096bef004fe0f6557e4f686e3da81a',
'BATS_3.0/4_Lexicographic_semantics/L08 [synonyms - exact].txt':
'a17c591961bddefd97ae5df71f9d1559ce7900f4',
'BATS_3.0/4_Lexicographic_semantics/L09 [antonyms - gradable].txt':
'117fbb86504c192b33a5469f2f282e741d9c016d',
'BATS_3.0/4_Lexicographic_semantics/L10 [antonyms - binary].txt':
'3cde2f2c2a0606777b8d7d11d099f316416a7224'}
BATS_CATEGORIES = {
'I01': '[noun - plural_reg]',
'I02': '[noun - plural_irreg]',
'I03': '[adj - comparative]',
'I04': '[adj - superlative]',
'I05': '[verb_inf - 3pSg]',
'I06': '[verb_inf - Ving]',
'I07': '[verb_inf - Ved]',
'I08': '[verb_Ving - 3pSg]',
'I09': '[verb_Ving - Ved]',
'I10': '[verb_3pSg - Ved]',
'D01': '[noun+less_reg]',
'D02': '[un+adj_reg]',
'D03': '[adj+ly_reg]',
'D04': '[over+adj_reg]',
'D05': '[adj+ness_reg]',
'D06': '[re+verb_reg]',
'D07': '[verb+able_reg]',
'D08': '[verb+er_irreg]',
'D09': '[verb+tion_irreg]',
'D10': '[verb+ment_irreg]',
'E01': '[country - capital]',
'E02': '[country - language]',
'E03': '[UK_city - county]',
'E04': '[name - nationality]',
'E05': '[name - occupation]',
'E06': '[animal - young]',
'E07': '[animal - sound]',
'E08': '[animal - shelter]',
'E09': '[things - color]',
'E10': '[male - female]',
'L01': '[hypernyms - animals]',
'L02': '[hypernyms - misc]',
'L03': '[hyponyms - misc]',
'L04': '[meronyms - substance]',
'L05': '[meronyms - member]',
'L06': '[meronyms - part]',
'L07': '[synonyms - intensity]',
'L08': '[synonyms - exact]',
'L09': '[antonyms - gradable]',
'L10': '[antonyms - binary]'
}
SEMEVAL17_CHECKSUMS = \
{'SemEval17-Task2/README.txt':
'ad02d4c22fff8a39c9e89a92ba449ec78750af6b',
'SemEval17-Task2/task2-scorer.jar':
'145ef73ce955656d59e3b67b41f8152e8ee018d8',
'SemEval17-Task2/test/subtask1-monolingual/data/de.test.data.txt':
'6fc840f989d2274509549e472a68fb88dd2e149f',
'SemEval17-Task2/test/subtask1-monolingual/data/en.test.data.txt':
'05293fcbd80b2f4aad9b6518ce1a546ad8f61f33',
'SemEval17-Task2/test/subtask1-monolingual/data/es.test.data.txt':
'552904b5988f9951311290ca8fa0441dd4351d4b',
'SemEval17-Task2/test/subtask1-monolingual/data/fa.test.data.txt':
'29d5970feac5982961bd6ab621ba31f83d3bff77',
'SemEval17-Task2/test/subtask1-monolingual/data/it.test.data.txt':
'c95fe2be8fab37e9c70610117bdedc48a0a8e95c',
'SemEval17-Task2/test/subtask1-monolingual/keys/de.test.gold.txt':
'c51463460495a242cc726d41713c5e00b66fdd18',
'SemEval17-Task2/test/subtask1-monolingual/keys/en.test.gold.txt':
'2d2bb2ed41308cc60e7953cc9036f7dc89141b48',
'SemEval17-Task2/test/subtask1-monolingual/keys/es.test.gold.txt':
'a5842ff17fe3847d15414924826a8eb236018bcc',
'SemEval17-Task2/test/subtask1-monolingual/keys/fa.test.gold.txt':
'717bbe035d8ae2bad59416eb3dd4feb7238b97d4',
'SemEval17-Task2/test/subtask1-monolingual/keys/it.test.gold.txt':
'a342b950109c73afdc86a7829e17c1d8f7c482f0',
'SemEval17-Task2/test/subtask2-crosslingual/data/de-es.test.data.txt':
'ef92b1375762f68c700e050d214d3241ccde2319',
'SemEval17-Task2/test/subtask2-crosslingual/data/de-fa.test.data.txt':
'17aa103981f3193960309bb9b4cc151acaf8136c',
'SemEval17-Task2/test/subtask2-crosslingual/data/de-it.test.data.txt':
'eced15e8565689dd67605a82a782d19ee846222a',
'SemEval17-Task2/test/subtask2-crosslingual/data/en-de.test.data.txt':
'5cb69370a46385a7a3d37cdf2018744be77203a0',
'SemEval17-Task2/test/subtask2-crosslingual/data/en-es.test.data.txt':
'402f7fed52b60e915fb1be49f935395488cf7a7b',
'SemEval17-Task2/test/subtask2-crosslingual/data/en-fa.test.data.txt':
'9bdddbbde3da755f2a700bddfc3ed1cd9324ad48',
'SemEval17-Task2/test/subtask2-crosslingual/data/en-it.test.data.txt':
'd3b37aac79ca10311352309ef9b172f686ecbb80',
'SemEval17-Task2/test/subtask2-crosslingual/data/es-fa.test.data.txt':
'a2959aec346c26475a4a6ad4d950ee0545f2381e',
'SemEval17-Task2/test/subtask2-crosslingual/data/es-it.test.data.txt':
'ca627c30143d9f82a37a8776fabf2cee226dd35c',
'SemEval17-Task2/test/subtask2-crosslingual/data/it-fa.test.data.txt':
'a03d79a6ce7b798356b53b4e85dbe828247b97ef',
'SemEval17-Task2/test/subtask2-crosslingual/keys/de-es.test.gold.txt':
'7564130011d38daad582b83135010a2a58796df6',
'SemEval17-Task2/test/subtask2-crosslingual/keys/de-fa.test.gold.txt':
'c9e23c2e5e970e7f95550fbac3362d85b82cc569',
'SemEval17-Task2/test/subtask2-crosslingual/keys/de-it.test.gold.txt':
'b74cc2609b2bd2ceb5e076f504882a2e0a996a3c',
'SemEval17-Task2/test/subtask2-crosslingual/keys/en-de.test.gold.txt':
'428dfdad2a144642c13c24b845e6b7de6bf5f663',
'SemEval17-Task2/test/subtask2-crosslingual/keys/en-es.test.gold.txt':
'1dd7ab08a10552486299151cdd32ed19b56db682',
'SemEval17-Task2/test/subtask2-crosslingual/keys/en-fa.test.gold.txt':
'17451ac2165aa9b695dae9b1aba20eb8609fb400',
'SemEval17-Task2/test/subtask2-crosslingual/keys/en-it.test.gold.txt':
'5041c0b84a603ed85aa0a5cbe4b1c34f69a2fa7c',
'SemEval17-Task2/test/subtask2-crosslingual/keys/es-fa.test.gold.txt':
'8c09a219670dc32ab3864078bf0c28a287accabc',
'SemEval17-Task2/test/subtask2-crosslingual/keys/es-it.test.gold.txt':
'b1cdd13209354cc2fc2f4226c80aaa85558daf4a',
'SemEval17-Task2/test/subtask2-crosslingual/keys/it-fa.test.gold.txt':
'e0b560bb1d2db39ce45e841c8aad611734dc94f1',
'SemEval17-Task2/trial/subtask1-monolingual/data/de.trial.data.txt':
'dd071fd90f59bec8d271a447d86ee2e462941f52',
'SemEval17-Task2/trial/subtask1-monolingual/data/en.trial.data.txt':
'e8e5add0850b3dec07f102be26b8791a5e9bbbcf',
'SemEval17-Task2/trial/subtask1-monolingual/data/es.trial.data.txt':
'8956c78ff9ceae1d923a57816e55392c6a7dfc49',
'SemEval17-Task2/trial/subtask1-monolingual/data/fa.trial.data.txt':
'2f7c4247cde0d918b3508e90f6b49a1f5031c81b',
'SemEval17-Task2/trial/subtask1-monolingual/data/it.trial.data.txt':
'c11e0b5b55f94fc97c7b11fa455e71b071be879f',
'SemEval17-Task2/trial/subtask1-monolingual/keys/de.trial.gold.txt':
'ce5567b1accf3eb07da53229dfcb2a8a1dfac380',
'SemEval17-Task2/trial/subtask1-monolingual/keys/en.trial.gold.txt':
'693cb5928e807c79e39136dc0981dadca7832ae6',
'SemEval17-Task2/trial/subtask1-monolingual/keys/es.trial.gold.txt':
'8241ca66bf5ba55f77607e9bcfae8e34902715d8',
'SemEval17-Task2/trial/subtask1-monolingual/keys/fa.trial.gold.txt':
'd30701a93c8c5500b82ac2334ed8410f9a23864b',
'SemEval17-Task2/trial/subtask1-monolingual/keys/it.trial.gold.txt':
'bad225573e1216ba8b35429e9fa520a20e8ce031',
'SemEval17-Task2/trial/subtask1-monolingual/output/de.trial.sample.output.txt':
'f85cba9f6690d61736623c16e620826b09384aa5',
'SemEval17-Task2/trial/subtask1-monolingual/output/en.trial.sample.output.txt':
'f85cba9f6690d61736623c16e620826b09384aa5',
'SemEval17-Task2/trial/subtask1-monolingual/output/es.trial.sample.output.txt':
'f85cba9f6690d61736623c16e620826b09384aa5',
'SemEval17-Task2/trial/subtask1-monolingual/output/fa.trial.sample.output.txt':
'f85cba9f6690d61736623c16e620826b09384aa5',
'SemEval17-Task2/trial/subtask1-monolingual/output/it.trial.sample.output.txt':
'f85cba9f6690d61736623c16e620826b09384aa5',
'SemEval17-Task2/trial/subtask2-crosslingual/data/de-es.trial.data.txt':
'c27c8977d8d4434fdc3e59a7b0121d87e0a03237',
'SemEval17-Task2/trial/subtask2-crosslingual/data/de-fa.trial.data.txt':
'88a6f6dd1bba309f7cae7281405e37f442782983',
'SemEval17-Task2/trial/subtask2-crosslingual/data/de-it.trial.data.txt':
'ebdab0859f3b349fa0120fc8ab98be3394f0d73d',
'SemEval17-Task2/trial/subtask2-crosslingual/data/en-de.trial.data.txt':
'128d1a460fe9836b66f0fcdf59455b02edb9f258',
'SemEval17-Task2/trial/subtask2-crosslingual/data/en-es.trial.data.txt':
'508c5dde8ffcc32ee3009a0d020c7c96a338e1d1',
'SemEval17-Task2/trial/subtask2-crosslingual/data/en-fa.trial.data.txt':
'1a3640eb5facfe15b1e23a07183a2e62ed80c7d9',
'SemEval17-Task2/trial/subtask2-crosslingual/data/en-it.trial.data.txt':
'141c83d591b0292016583d9c23a2cc5514a006aa',
'SemEval17-Task2/trial/subtask2-crosslingual/data/es-fa.trial.data.txt':
'a0a548cd698c389ee80c34d6ec72abed5f1625e5',
'SemEval17-Task2/trial/subtask2-crosslingual/data/es-it.trial.data.txt':
'8d42bed8a43ff93d26ca95794758d9392ca707ed',
'SemEval17-Task2/trial/subtask2-crosslingual/data/it-fa.trial.data.txt':
'9c85223f1f734de61c28157df0ce417bb0537803',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/de-es.trial.gold.txt':
'126c92b2fb3b8f2784dd4ae2a4c52b02a87a8196',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/de-fa.trial.gold.txt':
'1db6201c2c8f19744c39dbde8bd4a803859d64c1',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/de-it.trial.gold.txt':
'5300bf2ead163ff3981fb41ec5d0e291c287c9e0',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/en-de.trial.gold.txt':
'd4f5205de929bb0c4020e1502a3f2204b5accd51',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/en-es.trial.gold.txt':
'3237e11c3a0d9c0f5d583f8dc1d025b97a1f8bfe',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/en-fa.trial.gold.txt':
'c14de7bf326907336a02d499c9b92ab229f3f4f8',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/en-it.trial.gold.txt':
'3c0276c4b4e7a6d8a618bbe1ab0f30ad7b07929c',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/es-fa.trial.gold.txt':
'359f69e9dfd6411a936baa3392b8f05c398a7707',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/es-it.trial.gold.txt':
'44090607fabe5a26926a384e521ef1317f6f00d0',
'SemEval17-Task2/trial/subtask2-crosslingual/keys/it-fa.trial.gold.txt':
'97b09ffa11803023c2143fd4a4ac4bbc9775e645',
'SemEval17-Task2/trial/subtask2-crosslingual/output/de-es.trial.sample.output.txt':
'a0735361a692be357963959728dacef85ea08240',
'SemEval17-Task2/trial/subtask2-crosslingual/output/de-fa.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/de-it.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/en-de.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/en-es.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/en-fa.trial.sample.output.txt':
'a0735361a692be357963959728dacef85ea08240',
'SemEval17-Task2/trial/subtask2-crosslingual/output/en-it.trial.sample.output.txt':
'a0735361a692be357963959728dacef85ea08240',
'SemEval17-Task2/trial/subtask2-crosslingual/output/es-fa.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/es-it.trial.sample.output.txt':
'b71166d8615e921ee689cefc81419398d341167f',
'SemEval17-Task2/trial/subtask2-crosslingual/output/it-fa.trial.sample.output.txt':
'a0735361a692be357963959728dacef85ea08240'}
UD21_DATA_FILE_SHA1 = \
{'af': {'dev': ('af-ud-dev.conllu',
'e37b104f4425ee00afc81779201816d5ac525194'),
'test': ('af-ud-test.conllu',
'd2bf02370d308ee957c04242bd0871db0e488389'),
'train': ('af-ud-train.conllu',
'a652c7b19c236063d3ea489947f83095893b699a')},
'grc_proiel': {'dev': ('grc_proiel-ud-dev.conllu',
'd199530c7e40ff0214e510957bb126af0dc12c1c'),
'test': ('grc_proiel-ud-test.conllu',
'bb7825ddeb18fc2d86638e4725f04563f3e08aab'),
'train': ('grc_proiel-ud-train.conllu',
'fe6c861299b033abe8c4ce2b6131cd74f87b96a7')},
'grc': {'dev': ('grc-ud-dev.conllu',
'debdfec0272cd558ccd29fe0ae2f13175dd20a33'),
'test': ('grc-ud-test.conllu',
'f19accf31db95e2c736d716d3438c09aa877eb07'),
'train': ('grc-ud-train.conllu',
'e98d3eabea67787c5d43a498f5a0fa4246f38104')},
'ar_nyuad': {'dev': ('ar_nyuad-ud-dev.conllu',
'b740de9bd68e68b30b9b313eb050d44e94470ca5'),
'test': ('ar_nyuad-ud-test.conllu',
'f5d5b8979b7fedd76235d4bae77e0b4a7b0a750a'),
'train': ('ar_nyuad-ud-train.conllu',
'd065f03958fd8782a7431b6778c6665ad09444a6')},
'ar_pud': {'test': ('ar_pud-ud-test.conllu',
'2161701e6726b6feb14733a312fba6160b9eb722')},
'ar': {'dev': ('ar-ud-dev.conllu',
'5f8964974d5ba5eb3504cdafb93c34c473c4177c'),
'test': ('ar-ud-test.conllu',
'58df161047f310cc3bb4d0e615ca33466e630bb9'),
'train': ('ar-ud-train.conllu',
'0a3d5cefa1fecd6a74f2016ee73ea7a7a02eb359')},
'eu': {'dev': ('eu-ud-dev.conllu',
'3ee15b5ed46ec93d7278c8cc0351d242417d553d'),
'test': ('eu-ud-test.conllu',
'aa68d6442ac6dc1abedc19c1b98c4a9944786188'),
'train': ('eu-ud-train.conllu',
'd56ec997916e38ee6ab1badd78c119e81e4797c9')},
'be': {'dev': ('be-ud-dev.conllu',
'015473e91cf8937c46e8b721f206415abac16a35'),
'test': ('be-ud-test.conllu',
'f009ea1885f54cfd77fca8a2c89133b2af8f9f5e'),
'train': ('be-ud-train.conllu',
'26b871e28d2f356a709f106b6e3e86b417ba74e7')},
'bg': {'dev': ('bg-ud-dev.conllu',
'0a2284b10547681eb65691eb2a9f0f1662e16e90'),
'test': ('bg-ud-test.conllu',
'75ea2a5e1d55bb57efecae6ec2b5ac3cc1b37e57'),
'train': ('bg-ud-train.conllu',
'd4b2fa267010c4486885c91f3af65ff66c8be94c')},
'bxr': {'sample': ('bxr-ud-sample.conllu',
'9239bdd251a60820c71111ec54de9e7d58a8579d'),
'test': ('bxr-ud-test.conllu',
'0a06e527454ae0b547153222f67eb5db94e528fd')},
'yue': {'test': ('yue-ud-test.conllu',
'd91477c65aa75cd45489cca13f7a122066972bdb')},
'ca': {'dev': ('ca-ud-dev.conllu',
'5737824f0afff0d07a43db331f102d62c6da2d96'),
'test': ('ca-ud-test.conllu',
'0e28bd2a3b982515c1158194ad52bcbbe741e170'),
'train': ('ca-ud-train.conllu',
'b5ff2392722d4a1df3bfc52fa5b8f2043b7aec0c')},
'zh_cfl': {'test': ('zh_cfl-ud-test.conllu',
'32fe45cd0e4e11ced95202971bce74acbc6a8c30')},
'zh_hk': {'test': ('zh_hk-ud-test.conllu',
'4c75fa5bbcdcb181447b4e037224d50feb2776fb')},
'zh_pud': {'test': ('zh_pud-ud-test.conllu',
'b3e448884b7b6229379f9723b97c6e9a6fedcb61')},
'zh': {'dev': ('zh-ud-dev.conllu',
'34d8253b35ad2245d59ddffa71b5689ef267b6b2'),
'test': ('zh-ud-test.conllu',
'0f00516097650c12262298dd0fbd1b17a6d2bfe2'),
'train': ('zh-ud-train.conllu',
'9444eec5f4561f289ad140e47e49013689512a65')},
'cop': {'dev': ('cop-ud-dev.conllu',
'863d1004df1a92df52515105f6fae6ff68539595'),
'test': ('cop-ud-test.conllu',
'd3b33566679f071d4ad622ad840cd98381835706'),
'train': ('cop-ud-train.conllu',
'33d0e5de5d6077f7c52a4cd90bce0047f3e9ff6f')},
'hr': {'dev': ('hr-ud-dev.conllu',
'8da2a419980807d2e91e09b6bf496e58d442b0ba'),
'test': ('hr-ud-test.conllu',
'49d673cba3d32d39d413e557276a45a0214ed83e'),
'train': ('hr-ud-train.conllu',
'e5cc686bb46c80c84c3ac60ed459e1f124c04c08')},
'cs_cac': {'dev': ('cs_cac-ud-dev.conllu',
'69dfed28c29146b41a3428f4715bde70a6aecf00'),
'test': ('cs_cac-ud-test.conllu',
'a994b33ebbde486c1818a9df460fb112055e95de'),
'train': ('cs_cac-ud-train.conllu',
'694f8559471dc481612606bf5df078daa094a84e')},
'cs_cltt': {'dev': ('cs_cltt-ud-dev.conllu',
'f35d5dbe57cd95760901ea29de4f493d5d2a44d4'),
'test': ('cs_cltt-ud-test.conllu',
'a8f6696785e658471f759bc736b738a105cba9a3'),
'train': ('cs_cltt-ud-train.conllu',
'ab97886066bfa462e5da03d25f802489292c0b56')},
'cs_fictree': {'dev': ('cs_fictree-ud-dev.conllu',
'dc67c07737a3a8bf2633068941f2d55f1500e192'),
'test': ('cs_fictree-ud-test.conllu',
'06becaedef1cfdb8e1b2dce3f0d3a3a607d178a4'),
'train': ('cs_fictree-ud-train.conllu',
'fe7dbe3a0e6ee73e19e788c43bbb8f8f47ae1645')},
'cs_pud': {'test': ('cs_pud-ud-test.conllu',
'9f205677041de694157ba2ef3e1eadb44d467f2f')},
'cs': {'dev': ('cs-ud-dev.conllu',
'd609e895b21b8710337e23a98b58ffd7b7a54bf1'),
'test': ('cs-ud-test.conllu',
'34091286a11b1ce2a9c8bcfa03fdd86fb0e13965'),
'train': ('cs-ud-train.conllu',
'd1f855798a29d433b580d01ade0d8d062cd58534')},
'da': {'dev': ('da-ud-dev.conllu',
               '2c0c798c20a2efb30273172d388342a82bb0ce3c'),
'test': ('da-ud-test.conllu',
'85a95a8527f8773f1575ceaf0ab51f204b211047'),
'train': ('da-ud-train.conllu',
'b653c029a7ae5c106f865dcef949fb3fe2aa0420')},
'nl_lassysmall': {'dev': ('nl_lassysmall-ud-dev.conllu',
'2a169af74c2206c9073c3932b4a300492a314ee5'),
'test': ('nl_lassysmall-ud-test.conllu',
'39f08896a40ad370f2acc37d58689cdc43a660a9'),
'train': ('nl_lassysmall-ud-train.conllu',
'e4fd6bac246c81bb17a3c932e251b8662739cc19')},
'nl': {'dev': ('nl-ud-dev.conllu',
'33a9387eef9f5c0b15bd1e76e78776863f1f6d90'),
'test': ('nl-ud-test.conllu',
'01b3e1048792c851fdd59882c353fcdb76dc165e'),
'train': ('nl-ud-train.conllu',
'8e6a10152b7d09ce61433dd5f715ab2401611cf6')},
'en_lines': {'dev': ('en_lines-ud-dev.conllu',
'83b63b7670ea4394b558bc26e16a004339f0a0ef'),
'test': ('en_lines-ud-test.conllu',
'ccc9d3c71a873313d138c3adb12405a97eb270d8'),
'train': ('en_lines-ud-train.conllu',
'da42bfac9fd97d98ebbbc37c65d83ff4c53b4e79')},
'en_pud': {'test': ('en_pud-ud-test.conllu',
'4a9c83ba058a7e51979af790ba0440cc274b948f')},
'en_partut': {'dev': ('en_partut-ud-dev.conllu',
'863a6f571158acaaca95223e50bd08fc0c1134f0'),
'test': ('en_partut-ud-test.conllu',
'0c0780b0f14e4623f1014e6496d639cd2d2f6ffd'),
'train': ('en_partut-ud-train.conllu',
'e00a2d6f7efa28c8aaa40dccdf29b59a50f48e18')},
'en': {'dev': ('en-ud-dev.conllu',
'e2159dda4400d289ad8a403b466c8d23d733ba35'),
'test': ('en-ud-test.conllu',
'bd36ef23f76155625b379d063427bd62f19b7658'),
'train': ('en-ud-train.conllu',
'993c44f62104971fe2d056847349facbb7986258')},
'et': {'dev': ('et-ud-dev.conllu',
'312f9477f7ee1dd380c1fbcf77a6f0c63476fdbb'),
'test': ('et-ud-test.conllu',
'd70907f0771b41a27406672b9d91043a0954f946'),
'train': ('et-ud-train.conllu',
'b6d788e7a3362d0984d1cff06c1ba3d66f6bf773')},
'fi_ftb': {'dev': ('fi_ftb-ud-dev.conllu',
'552ec574acdb3209e7545af4e16a43a1e2956979'),
'test': ('fi_ftb-ud-test.conllu',
'13c34838a0fa9e379f9624ed1f4c368ca50a7d98'),
'train': ('fi_ftb-ud-train.conllu',
'73d025250bfc82a24181b5ed601dc4ae7c8e846c')},
'fi_pud': {'test': ('fi_pud-ud-test.conllu',
'4ab7b0d99ce6697d79732e401be97585a28c2afa')},
'fi': {'dev': ('fi-ud-dev.conllu',
'e023cf7eaffbda20bd4518d87fe9086207bb5361'),
'test': ('fi-ud-test.conllu',
'fd57c5106e43994250f4472890572bdbb8b4a48b'),
'train': ('fi-ud-train.conllu',
'ab27bda8cbb62886196b78de87985a4c6cf8215d')},
'fr_ftb': {'dev': ('fr_ftb-ud-dev.conllu',
'71b3cc02601f64711f98e33a6b2af10aa00700be'),
'test': ('fr_ftb-ud-test.conllu',
'723b8c44e74202a18b7e71268b738a5e1aa15f86'),
'train': ('fr_ftb-ud-train.conllu',
'9a347120478254647deb7c7e02871b28aad23ec4')},
'fr_pud': {'test': ('fr_pud-ud-test.conllu',
'570b7e31dc359ed62123bea6546efa13cfc2cf25')},
'fr_partut': {'dev': ('fr_partut-ud-dev.conllu',
'1505030048829a8dccc466cc86bca057996301ae'),
'test': ('fr_partut-ud-test.conllu',
'f6446317c9f82cc0b70a76be75282804a3359ac0'),
'train': ('fr_partut-ud-train.conllu',
'f87c246cfa91186b90c7780cb64783034f196622')},
'fr_sequoia': {'dev': ('fr_sequoia-ud-dev.conllu',
'859b10d80c7b3a382571cce9b2620039673539d1'),
'test': ('fr_sequoia-ud-test.conllu',
'be0ef69e392e64030414748da2995433f23e033d'),
'train': ('fr_sequoia-ud-train.conllu',
'48ac01913518888a32670a687123ed1bac57e0e9')},
'fr': {'dev': ('fr-ud-dev.conllu',
'5de0aee778bcc69d14285ada88f0ff7e5ac0a0cd'),
'test': ('fr-ud-test.conllu',
'd20a014acd38193155a33a5233c13f89541c78c3'),
'train': ('fr-ud-train.conllu',
'feee0cc85a2d7dcb3397399ef22c8af8ef75420b')},
'gl_treegal': {'dev': ('gl_treegal-ud-dev.conllu',
'272558614cff4a5e1f2805626904e6dc488b8d25'),
'test': ('gl_treegal-ud-test.conllu',
'18d99474d3aa9c83878c42a79d7881330dd9b861'),
'train': ('gl_treegal-ud-train.conllu',
'b1691dd5f587a19eb9dc6f141ecbd3eec3bb0e07')},
'gl': {'dev': ('gl-ud-dev.conllu',
'e72390dce9bf973442deef31ed0cd7a975361fe5'),
'test': ('gl-ud-test.conllu',
'7d82ba3672bd4427674428e1dcbcae4feebc3aeb'),
'train': ('gl-ud-train.conllu',
'd586e7bffa314f8c5b85288e060e68dddc1f5d33')},
'de_pud': {'test': ('de_pud-ud-test.conllu',
'2c91e42b7345145290b68385ff5270910048b8c4')},
'de': {'dev': ('de-ud-dev.conllu',
'9b4f49bfa2b609d54369890d9e7d8d24a3c229af'),
'test': ('de-ud-test.conllu',
'48f0f6f98b38710906481b5e9fe1d459d28f1b4a'),
'train': ('de-ud-train.conllu',
'04a1d6a6a2da9d9c38496118e0432c9a6720db64')},
'got': {'dev': ('got-ud-dev.conllu',
'501c47193ca2af5826e4afcc04941df87a7c47c3'),
'test': ('got-ud-test.conllu',
'cfcf16d562434987562bd1f5faa0d8c007e9ddb8'),
'train': ('got-ud-train.conllu',
'b4951ede89d947c6617df782ac248566235f78fb')},
'el': {'dev': ('el-ud-dev.conllu',
'9df0919ed6f9dcab3ba3f60f0ad31d0c79ae6cdb'),
'test': ('el-ud-test.conllu',
'1bb4a6b24521f0c3c7d6cf71e2456ef3a1ee31aa'),
'train': ('el-ud-train.conllu',
'32f4abc821624c4cd4d3b3b555c1558f06366e2c')},
'he': {'dev': ('he-ud-dev.conllu',
'c5b76874fcf11c7733e1555957bb49e8298af140'),
'test': ('he-ud-test.conllu',
'4fbe4115948250fc2e42dd43399d1c6c11ddcfd2'),
'train': ('he-ud-train.conllu',
'eae49a515b38d224b109138bf006a112e80a7caf')},
'hi_pud': {'test': ('hi_pud-ud-test.conllu',
'd237fecc594186e7a52ad33313ac52e927905d73')},
'hi': {'dev': ('hi-ud-dev.conllu',
'48b592bb1aa1cbc30d41d2913421cfd3f9d2c790'),
'test': ('hi-ud-test.conllu',
'004a7fdde368f32f9f230bc5e2cf4ce9e1d8f8d7'),
'train': ('hi-ud-train.conllu',
'9be8afb2cabda361817c55b3de6ebba2c3fef7e0')},
'hu': {'dev': ('hu-ud-dev.conllu',
'ec622e6bcf2a84b0b47eba0de01cf5768157a50e'),
'test': ('hu-ud-test.conllu',
'fd717d25add38c2fb2dc8e82e2f9e5b0b9f3c5b8'),
'train': ('hu-ud-train.conllu',
'e5486523a8bebe40d633ad8b4050be8a3d11c78a')},
'id': {'dev': ('id-ud-dev.conllu',
'7b181aa954a4f4b22b80a18e4f67cbf423e9c701'),
'test': ('id-ud-test.conllu',
'357ed8c216725760bf5be561ed6e918ce602b5ac'),
'train': ('id-ud-train.conllu',
'328ea588b75de55ef48373c2bf9983bca277d724')},
'ga': {'dev': ('ga-ud-dev.conllu',
'180a1a9dcfcec6528a559032c67e9a15693a039d'),
'test': ('ga-ud-test.conllu',
'b74a56372af3f68f089ea82ba858e5a82aae4e22'),
'train': ('ga-ud-train.conllu',
'40df0b12fbadae6e56c0a01da483d6c612d9450c')},
'it_pud': {'test': ('it_pud-ud-test.conllu',
'c7121c03dbdc7d27f89c6f6dd8f046b89233438e')},
'it_partut': {'dev': ('it_partut-ud-dev.conllu',
'0bb5dc0c0815212c9832eaef3b802cf885e0543b'),
'test': ('it_partut-ud-test.conllu',
'b5eccd3d9a94a2f96c8c3a6e4192a287ac563898'),
'train': ('it_partut-ud-train.conllu',
'784b18bf8d3b59d967d147075a3cb5b03fb28637')},
'it_postwita': {'dev': ('it_postwita-ud-dev.conllu',
'07f6f658246aa070e2166e688f7569d61aafff54'),
'test': ('it_postwita-ud-test.conllu',
'c2d58f50e51d37cb5f55bd0a3129138e95a72a8a'),
'train': ('it_postwita-ud-train.conllu',
'69684c47fba99230f6ef1a204b95c37d28eaa5a6')},
'it': {'dev': ('it-ud-dev.conllu',
'ea8fd59f36280fbd77b9a807959491636048a698'),
'test': ('it-ud-test.conllu',
'34839fdeeef883f8034c723a18772947106cec6b'),
'train': ('it-ud-train.conllu',
'a0cae413f46a344366f86bc7ffe4f5d7ecbf6a14')},
'ja_pud': {'test': ('ja_pud-ud-test.conllu',
'4c914016a0968ca434348370d38c9579a60e8fd7')},
'ja': {'dev': ('ja-ud-dev.conllu',
'21f06fef7fbeccd05a298385bf40f8b4ffe95146'),
'test': ('ja-ud-test.conllu',
'240d3532698356a7c6f93c3215718ef2f66a672f'),
'train': ('ja-ud-train.conllu',
'35eaf307d94c2006241fe08f745d7b1b17f049cf')},
'kk': {'dev': ('kk-ud-dev.conllu',
'038033c822b407040a4ecb87c077506cd0d1a322'),
'test': ('kk-ud-test.conllu',
'4124bcaa6e4fc132613d94a882abcff8ecad8ca0'),
'train': ('kk-ud-train.conllu',
'48d664d273ad6731cb65228ce9b57ad3cf50f7f5')},
'ko': {'dev': ('ko-ud-dev.conllu',
'60e7da7cca44c923873a062e80262726659f5528'),
'test': ('ko-ud-test.conllu',
'bc9a0fc4ddfed14b70bb58048bf8b8d50062cffd'),
'train': ('ko-ud-train.conllu',
'ee21328f9ea39668e802f0cb6a794358f5c256bf')},
'kmr': {'sample': ('kmr-ud-sample.conllu',
'd76d631400d17b63b9592ce3c0f4ecada012d6d0'),
'test': ('kmr-ud-test.conllu',
'606a338db2d6adde6b4d7d8c9ee2bdf1f988d729')},
'la_ittb': {'dev': ('la_ittb-ud-dev.conllu',
'd9f17992bd0258a734aea9b6c53759039717c86a'),
'test': ('la_ittb-ud-test.conllu',
'f4d097d076083240c48594d4cb058840ff16be8e'),
'train': ('la_ittb-ud-train.conllu',
'627d5b30b20655efab194c75fc9219b0aa2cf4b6')},
'la_proiel': {'dev': ('la_proiel-ud-dev.conllu',
'9a510ff1f29b507ce46d32c04eb8f02ec8bdb4fb'),
'test': ('la_proiel-ud-test.conllu',
'697dbeae38507856a4fafa8506dfc8db5e8e4054'),
'train': ('la_proiel-ud-train.conllu',
'5e57e0a83ed8dcdfcc892c2558249cb6bc02b37a')},
'la': {'dev': ('la-ud-dev.conllu',
'2748bb0479cb599e1a007d1d1634d5870b45549b'),
'test': ('la-ud-test.conllu',
'19c62c64ce41a650e9b55a345c61e7c0d994816e'),
'train': ('la-ud-train.conllu',
'183ce6f58b0305e5926161e29b9a6aacc424662c')},
'lv': {'dev': ('lv-ud-dev.conllu',
'6bf3843d92aeb5b4a5e3b457708ad0aca176fbd2'),
'test': ('lv-ud-test.conllu',
'9f7806a24656db0e859efe041a88926b220b8e28'),
'train': ('lv-ud-train.conllu',
'f1eeff608e8f27d92b683ae041591355198841eb')},
'lt': {'dev': ('lt-ud-dev.conllu',
'0b8dc19005571fa7b66d8302b797d51a241f128b'),
'test': ('lt-ud-test.conllu',
'def54d6caf97610eb4ca8c0179d661c8eab98951'),
'train': ('lt-ud-train.conllu',
'13fe42a3d21f17a5cad5aaf38692619c7713e177')},
'mr': {'dev': ('mr-ud-dev.conllu',
'abf7ac90a3696bb979e6ddc17cbc0fc761040b1b'),
'test': ('mr-ud-test.conllu',
'b70e2a135e69dc17474951bfd9c7cf3f203d4798'),
'train': ('mr-ud-train.conllu',
'24a1370184054a7f5af647997dca783d6c571242')},
'sme': {'sample': ('sme-ud-sample.conllu',
'8c456f06b363c4d273fc454a49505f783f00fe43'),
'test': ('sme-ud-test.conllu',
'6c2084f60d7f2d1468a0cb4f4a4b9669274b122e'),
'train': ('sme-ud-train.conllu',
'203eab4183fd585efe3fea7e6df493a6746b0a9f')},
'no_bokmaal': {'dev': ('no_bokmaal-ud-dev.conllu',
'3a1aa6646ee62c605a6e5a7b535434ce93d0581f'),
'test': ('no_bokmaal-ud-test.conllu',
'18336ef0e4877ae28eb7d6019afe05b5a53245d5'),
'train': ('no_bokmaal-ud-train.conllu',
'c6a1d75956dfb9376e568bf241b3ee5ebf3be3a5')},
'no_nynorsk': {'dev': ('no_nynorsk-ud-dev.conllu',
'5b95a070d11a61a23fc340ecbbbbb70f86884498'),
'test': ('no_nynorsk-ud-test.conllu',
'3eaab8e4af82de2333521e9be0954ffaf6b1440b'),
'train': ('no_nynorsk-ud-train.conllu',
'79319993097c30ddf28d4c1137b8662f4f35d17e')},
'no_nynorsklia': {'dev': ('no_nynorsklia-ud-dev.conllu',
'f3e3cc9b156784c12e7540b6e09a19963df8d7d9'),
'test': ('no_nynorsklia-ud-test.conllu',
'c43abf4ad0d9c1d844edb9ff0fdf8b00949c4a0b')},
'cu': {'dev': ('cu-ud-dev.conllu',
'0b67035ed5ca52aeefae443611232ed202fb990a'),
'test': ('cu-ud-test.conllu',
'0fed872a5a2480b601c67ebbecf8dcd680b6863b'),
'train': ('cu-ud-train.conllu',
'1c58f7322b96aa65e2b6bbeb5cb5226b46dc3ef0')},
'fa': {'dev': ('fa-ud-dev.conllu',
'098f97ff4c0a6a9dcaafe2c83908b1ff044b4446'),
'test': ('fa-ud-test.conllu',
'0024aa6bad5eceed2e36f77d88578304a5886a80'),
'train': ('fa-ud-train.conllu',
'1692f90f58fb1ed2faaa4e8c5d2d47a37c47082b')},
'pl': {'dev': ('pl-ud-dev.conllu',
'b7af7bee091feb0788eb9793a7102972006421dc'),
'test': ('pl-ud-test.conllu',
'e141e793ba35f8a08510ec1ce494099b5c800ca8'),
'train': ('pl-ud-train.conllu',
'f2227ba184a5030fc47b1aff732e04ae11b9ab94')},
'pt_br': {'dev': ('pt_br-ud-dev.conllu',
'8eedc77096a87fe8ab251100d460780e161e5397'),
'test': ('pt_br-ud-test.conllu',
'37a64e3acef107b62ab62ce478fc36ed112fb58f'),
'train': ('pt_br-ud-train.conllu',
'023cafcb6959d52298ad619f7838f26db9798aa9')},
'pt_pud': {'test': ('pt_pud-ud-test.conllu',
'4f7a98b59255ff58a1a423dda6f2cb7261dcea7d')},
'pt': {'dev': ('pt-ud-dev.conllu',
'2171b4ac2b0726c9dfae6adf394b76be927accab'),
'test': ('pt-ud-test.conllu',
'9e819a4592db42905806141d6fca3b7b20396ce3'),
'train': ('pt-ud-train.conllu',
'b5fbb6598d5cc53a0f7e699adeb4a61948a49b5c')},
'ro_nonstandard': {'test': ('ro_nonstandard-ud-test.conllu',
'300d53091412dc5700dc5cad0fd3e136f7c8cb11'),
'train': ('ro_nonstandard-ud-train.conllu',
'ed97f51129b63857627f838f68f41c9ef8541686')},
'ro': {'dev': ('ro-ud-dev.conllu',
'a320e29582e837fa48bbe0aab8e205cadfcb4a02'),
'test': ('ro-ud-test.conllu',
'0cfe4806a28ebdc02dc7ea58635d8b550c3a9d7b'),
'train': ('ro-ud-train.conllu',
'74beb2aa92d2fca50dbb1a4f716b936afb436ab9')},
'ru_pud': {'test': ('ru_pud-ud-test.conllu',
'bca81ce7aaf3cb8add98b19faecc1d8303901631')},
'ru_syntagrus': {'dev': ('ru_syntagrus-ud-dev.conllu',
'304c6ec7fb5060583af5f890384e3a480f8c3ad5'),
'test': ('ru_syntagrus-ud-test.conllu',
'c138e39b48dc1c66d106e68ee75c6fce28ef780c'),
'train': ('ru_syntagrus-ud-train.conllu',
'8fa56fa80845e4ad946189d1e7af228b5595e312')},
'ru': {'dev': ('ru-ud-dev.conllu',
'd3b11c0fd8a87bfb7ce9666a1888126ae5ddca90'),
'test': ('ru-ud-test.conllu',
'ae13bbf49e0d2fddae8ba2eeacd15a9a77c7bfff'),
'train': ('ru-ud-train.conllu',
'fd43e7323ad2e62a6924fc5b5d48e85c6ab5a430')},
'sa': {'test': ('sa-ud-test.conllu',
'fad3a03a6834884a092b1d326625c6f663e36636')},
'sr': {'dev': ('sr-ud-dev.conllu',
'dcb9a242986285e83512ddaa4b3ada07c4cea17a'),
'test': ('sr-ud-test.conllu',
'0f0c9e394c440bb2dd514bdd6873d3ffef13821b'),
'train': ('sr-ud-train.conllu',
'97ea9bfe4ac97011598fbb5ca20b5cbaf5093334')},
'sk': {'dev': ('sk-ud-dev.conllu',
'c84563c08922d60b0c765e9f9c22d9f6f2765ff9'),
'test': ('sk-ud-test.conllu',
'89af4581c5f9058809f48788eb635a92cda0603c'),
'train': ('sk-ud-train.conllu',
'89e108093bbf5619578955fdadfe200cefd8cf01')},
'sl_sst': {'dev': ('sl_sst-ud-dev.conllu',
'c65ae82123af95ec11f47262546b5ab2fc5735e5'),
'test': ('sl_sst-ud-test.conllu',
'144a0124c1181b49d0c542a4a6d4465e45545f3b'),
'train': ('sl_sst-ud-train.conllu',
'4cbb97d5c19cfb1d85cdd54a13e24de2343a4ac5')},
'sl': {'dev': ('sl-ud-dev.conllu',
'0078572c19574d32defeae9924176da2dd701ede'),
'test': ('sl-ud-test.conllu',
'616ace00e25df99be8dd49b7bf7c48f1093df96a'),
'train': ('sl-ud-train.conllu',
'1462ac69163b30cf1399527e95f686ebf91be2d3')},
'es_ancora': {'dev': ('es_ancora-ud-dev.conllu',
'94b00cc6449a1793b5ba1d9d5c1e4b34ad1cc7d5'),
'test': ('es_ancora-ud-test.conllu',
'8d7dc8d8441e1ca4b54708a5382ed61b48bf7920'),
'train': ('es_ancora-ud-train.conllu',
'95d5bf7ad33304f3440ffb014ac094c4967c303f')},
'es_pud': {'test': ('es_pud-ud-test.conllu',
'c2b17fce1da3bdd2a50d9dd7eca101db1d2907e0')},
'es': {'dev': ('es-ud-dev.conllu',
'4cdb828c492c6b7707af0ab6c7fbf734f770630a'),
'test': ('es-ud-test.conllu',
'afd1ae1b7eb73a91456c30acf388eef4faf4785a'),
'train': ('es-ud-train.conllu',
'5ce48b44ba1b3e748a40cb5bf893d3096518ecbc')},
'sv_lines': {'dev': ('sv_lines-ud-dev.conllu',
'15f1a04d960518fe7bfee23ce227fc7b78d4b755'),
'test': ('sv_lines-ud-test.conllu',
'843df4ea3ab4f551b1eaa661652a8d6489a81d41'),
'train': ('sv_lines-ud-train.conllu',
'16e3533bf174b36d728847a36a3600f16c63baa6')},
'sv_pud': {'test': ('sv_pud-ud-test.conllu',
'18dadac0c15468256b340835ebc0529facbe9b73')},
'sv': {'dev': ('sv-ud-dev.conllu',
'6d14e1aae5c9ae37c35481c44c04bf74a4233455'),
'test': ('sv-ud-test.conllu',
'7ead0f7b49508db0022c042195ac5925b611c5b7'),
'train': ('sv-ud-train.conllu',
'68affb85efde6ed017eab1e998e9666108559e04')},
'swl': {'dev': ('swl-ud-dev.conllu',
'828e0a08f12cabfa75f9dd2b53dba58606522a7c'),
'test': ('swl-ud-test.conllu',
'674f76631cf16172d67b795ff92dfbb297eb4930'),
'train': ('swl-ud-train.conllu',
'46b721f9cae2d5ba43f818dd487600b0ce76362a')},
'ta': {'dev': ('ta-ud-dev.conllu',
'4d01f555012ddc1976933d4d928e26470f71bfa1'),
'test': ('ta-ud-test.conllu',
'e8db8816a98d8b7e81188786db7c405979a7e3c3'),
'train': ('ta-ud-train.conllu',
'6753d8c7b1b016de39c087aab45056de6021c3ae')},
'te': {'dev': ('te-ud-dev.conllu',
'29f46355d767e54e8565f76a063c43e95ead0fca'),
'test': ('te-ud-test.conllu',
'50abe345d4ab5bae021cacd096266c57b00572b8'),
'train': ('te-ud-train.conllu',
'1794469abe09e7364cda0d9764cf515dcb4a61b6')},
'tr_pud': {'test': ('tr_pud-ud-test.conllu',
'aae839e2476a2f149c98e0274d245d07a50dafaa')},
'tr': {'dev': ('tr-ud-dev.conllu',
'421de4d8d0fbdda46750523bde72880414c134a3'),
'test': ('tr-ud-test.conllu',
'b175f136f6f0271c494a58a1846971c4a07cda27'),
'train': ('tr-ud-train.conllu',
'5aeaf25fc9e00c75e377983a0d0a642e4df6ae7d')},
'uk': {'dev': ('uk-ud-dev.conllu',
'0d3e3507edcd46a3eaa8c4702d0f5d84661a6d9d'),
'test': ('uk-ud-test.conllu',
'46c88fd623894fabdafb01a826016c215e4f65cc'),
'train': ('uk-ud-train.conllu',
'd06e0e2fa67c35a20517738bd728ac3b26d8eafe')},
'hsb': {'sample': ('hsb-ud-sample.conllu',
'148eddbb19b06115ea54e17a3fca58e99a85cbd9'),
'test': ('hsb-ud-test.conllu',
'3d319288b4c06395b2627980737131995949f770')},
'ur': {'dev': ('ur-ud-dev.conllu',
'dc41e72b5adeb92f308cdc8dfcbf71f84b4a5cf9'),
'test': ('ur-ud-test.conllu',
'af5da25be4c4ec1f2a222bc462b39ca4bbcc0eb0'),
'train': ('ur-ud-train.conllu',
'488d65b394d0de264be1221614c09e541f92f9de')},
'ug': {'dev': ('ug-ud-dev.conllu',
'a2e6cd7ef51ffd7c83de7c62fbad998f1020f857'),
'test': ('ug-ud-test.conllu',
'4877323d8dbfaa8ab862f0aa8e5484fdadb9ef43')},
'vi': {'dev': ('vi-ud-dev.conllu',
'1c733d3ea3e4cce00cb0aa4d599bcb3b0a6096a8'),
'test': ('vi-ud-test.conllu',
'1bb822e58f21aa5ccac15fe6c6742a42e8389d41'),
'train': ('vi-ud-train.conllu',
'ac86132afc061625740abd524c5cdf3d35ebbbc4')}}
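
# A minimal sketch of how SHA-1 tables like the ones above could be
# consumed; the `sha1_matches` helper is illustrative only and not part of
# the original module.
import hashlib

def sha1_matches(path, expected_sha1):
    """Return True if the SHA-1 digest of the file at `path` matches."""
    digest = hashlib.sha1()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha1

# Example usage (hypothetical local file):
#     fname, expected = UD21_DATA_FILE_SHA1['en']['train']
#     assert sha1_matches(fname, expected)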
import ambulance_game as abg
import numpy as np
import sympy as sym
from sympy.abc import a, b, c, d, e, f, g, h, i, j
def get_symbolic_pi(num_of_servers, threshold, system_capacity, buffer_capacity):
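    """Solve symbolically for the steady-state probability vector pi.

    The symbolic transition matrix Q is built for the given model, the
    last balance equation is replaced by the normalisation constraint
    (the probabilities sum to one), and the resulting linear system is
    solved by LU decomposition.  Only the seven placeholder symbols a-g
    are used as unknowns, so models with more than seven states are
    rejected.
    """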
Q_sym = abg.markov.get_symbolic_transition_matrix(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
dimension = Q_sym.shape[0]
if dimension > 7:
return "Capacity of 6 exceeded"
M_sym = sym.Matrix([Q_sym.transpose()[:-1, :], sym.ones(1, dimension)])
b_sym = sym.Matrix([sym.zeros(dimension - 1, 1), [1]])
system = M_sym.col_insert(dimension, b_sym)
sol = sym.solve_linear_system_LU(system, [a, b, c, d, e, f, g])
return sol
def get_symbolic_state_probabilities_1222():
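    """Symbolic state probabilities for the 1222 model (1 server,
    threshold 2, system capacity 2, buffer capacity 2), together with
    the matrix of ratios between the probabilities of adjacent states.
    """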
num_of_servers = 1
threshold = 2
system_capacity = 2
buffer_capacity = 2
sym_pi_1222 = get_symbolic_pi(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
all_states_1222 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1222 = [0 for _ in range(len(all_states_1222))]
    sym_state_probs_1222[0] = sym.factor(sym_pi_1222[a]) # (0,0)
    sym_state_probs_1222[1] = sym.factor(sym_pi_1222[b]) # (0,1)
    sym_state_probs_1222[2] = sym.factor(sym_pi_1222[c]) # (0,2)
    sym_state_probs_1222[3] = sym.factor(sym_pi_1222[d]) # (1,2)
    sym_state_probs_1222[4] = sym.factor(sym_pi_1222[e]) # (2,2)
sym_state_recursive_ratios_1222 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1222[0, 0] = 1
sym_state_recursive_ratios_1222[0, 1] = sym.factor(
sym_state_probs_1222[1] / sym_state_probs_1222[0]
) # (0,0) -> (0,1)
    sym_state_recursive_ratios_1222[0, 2] = sym.factor(
        sym_state_probs_1222[2] / sym_state_probs_1222[1]
    ) # (0,1) -> (0,2)
    sym_state_recursive_ratios_1222[1, 2] = sym.factor(
        sym_state_probs_1222[3] / sym_state_probs_1222[2]
    ) # (0,2) -> (1,2)
    sym_state_recursive_ratios_1222[2, 2] = sym.factor(
        sym_state_probs_1222[4] / sym_state_probs_1222[3]
    ) # (1,2) -> (2,2)
return sym_state_probs_1222, sym_state_recursive_ratios_1222
def get_symbolic_state_probabilities_1121():
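    """Symbolic state probabilities for the 1121 model (1 server,
    threshold 1, system capacity 2, buffer capacity 1).

    Returns the probabilities and three ratio matrices: step-by-step
    ratios between adjacent states, a "right" variant where some ratios
    are taken from the state to the left instead of the state below,
    and a "P0" variant where every state is taken relative to (0,0).
    """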
num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 1
all_states_1121 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_pi_1121 = get_symbolic_pi(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1121 = [0 for _ in range(len(all_states_1121))]
sym_state_probs_1121[0] = sym.factor(sym_pi_1121[a]) # (0,0)
sym_state_probs_1121[1] = sym.factor(sym_pi_1121[b]) # (0,1)
sym_state_probs_1121[2] = sym.factor(sym_pi_1121[c]) # (1,1)
sym_state_probs_1121[3] = sym.factor(sym_pi_1121[d]) # (0,2)
sym_state_probs_1121[4] = sym.factor(sym_pi_1121[e]) # (1,2)
sym_state_recursive_ratios_1121 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1121[0, 0] = 1
sym_state_recursive_ratios_1121[0, 1] = sym.factor(
sym_state_probs_1121[1] / sym_state_probs_1121[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1121[1, 1] = sym.factor(
sym_state_probs_1121[2] / sym_state_probs_1121[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1121[0, 2] = sym.factor(
sym_state_probs_1121[3] / sym_state_probs_1121[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[3]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_right_1121 = sym_state_recursive_ratios_1121.copy()
sym_state_recursive_ratios_right_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_P0_1121 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1121[0, 0] = 1
sym_state_recursive_ratios_P0_1121[0, 1] = sym.factor(
sym_state_probs_1121[1] / sym_state_probs_1121[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1121[1, 1] = sym.factor(
sym_state_probs_1121[2] / sym_state_probs_1121[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1121[0, 2] = sym.factor(
sym_state_probs_1121[3] / sym_state_probs_1121[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1121[1, 2] = sym.factor(
sym_state_probs_1121[4] / sym_state_probs_1121[0]
) # (0,0) -> (1,2)
return (
sym_state_probs_1121,
sym_state_recursive_ratios_1121,
sym_state_recursive_ratios_right_1121,
sym_state_recursive_ratios_P0_1121,
)
def get_symbolic_state_probabilities_1122():
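    """Symbolic state probabilities for the 1122 model (1 server,
    threshold 1, system capacity 2, buffer capacity 2).  The
    unnormalised probabilities are written out in closed form and then
    divided by their sum, rather than obtained from a solver.
    """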
# num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 2
all_states_1122 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1122 = [0 for _ in range(len(all_states_1122))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
sym_state_probs_1122[0] = (
(sym_mu**6)
+ 2 * (sym_lambda_2) * (sym_mu**5)
+ (sym_lambda_2**2) * (sym_mu**4)
) # (0,0)
sym_state_probs_1122[1] = (sym_Lambda * sym_mu**3) * (
sym_mu**2 + 2 * sym_mu * sym_lambda_2 + sym_lambda_2**2
) # (0,1)
sym_state_probs_1122[2] = (sym_Lambda * sym_lambda_2 * sym_mu**2) * (
sym_lambda_2**2
+ sym_lambda_2 * sym_lambda_1
+ sym_lambda_1 * sym_mu
+ sym_mu**2
+ 2 * sym_lambda_2 * sym_mu
) # (1,1)
sym_state_probs_1122[3] = (sym_Lambda * sym_lambda_2**2 * sym_mu) * (
sym_lambda_2**2
+ 2 * sym_lambda_1 * sym_lambda_2
+ 3 * sym_lambda_1 * sym_mu
+ sym_mu**2
+ 2 * sym_lambda_2 * sym_mu
+ sym_lambda_1**2
) # (2,1)
sym_state_probs_1122[4] = (sym_Lambda * sym_lambda_1 * sym_mu**3) * (
sym_lambda_2 + sym_mu
) # (0,2)
sym_state_probs_1122[5] = (
sym_Lambda * sym_lambda_1 * sym_lambda_2 * sym_mu**2
) * (
2 * sym_mu + sym_lambda_1 + sym_lambda_2
) # (1,2)
sym_state_probs_1122[6] = (sym_Lambda * sym_lambda_1 * sym_lambda_2**2) * (
sym_lambda_1**2
+ 4 * sym_lambda_1 * sym_mu
+ 2 * sym_lambda_1 * sym_lambda_2
+ 3 * sym_mu**2
+ sym_lambda_2**2
+ 3 * sym_lambda_2 * sym_mu
) # (2,2)
total_1122 = np.sum(sym_state_probs_1122)
    sym_state_probs_1122 = [prob / total_1122 for prob in sym_state_probs_1122]
sym_state_recursive_ratios_1122 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1122[0, 0] = 1
sym_state_recursive_ratios_1122[0, 1] = sym.factor(
sym_state_probs_1122[1] / sym_state_probs_1122[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1122[1, 1] = sym.factor(
sym_state_probs_1122[2] / sym_state_probs_1122[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1122[2, 1] = sym.factor(
sym_state_probs_1122[3] / sym_state_probs_1122[2]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1122[0, 2] = sym.factor(
sym_state_probs_1122[4] / sym_state_probs_1122[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[4]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[5]
) # (1,2) -> (2,2)
sym_state_recursive_ratios_right_1122 = sym_state_recursive_ratios_1122.copy()
sym_state_recursive_ratios_right_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[3]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_P0_1122 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1122[0, 0] = 1
sym_state_recursive_ratios_P0_1122[0, 1] = sym.factor(
sym_state_probs_1122[1] / sym_state_probs_1122[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1122[1, 1] = sym.factor(
sym_state_probs_1122[2] / sym_state_probs_1122[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1122[2, 1] = sym.factor(
sym_state_probs_1122[3] / sym_state_probs_1122[0]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1122[0, 2] = sym.factor(
sym_state_probs_1122[4] / sym_state_probs_1122[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1122[1, 2] = sym.factor(
sym_state_probs_1122[5] / sym_state_probs_1122[0]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1122[2, 2] = sym.factor(
sym_state_probs_1122[6] / sym_state_probs_1122[0]
) # (0,0) -> (2,2)
return (
sym_state_probs_1122,
sym_state_recursive_ratios_1122,
sym_state_recursive_ratios_right_1122,
sym_state_recursive_ratios_P0_1122,
)
def get_symbolic_state_probabilities_1123():
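    """Symbolic state probabilities for the 1123 model (1 server,
    threshold 1, system capacity 2, buffer capacity 3), obtained by
    writing the balance equations out explicitly and solving them with
    sym.solve.
    """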
num_of_servers = 1
threshold = 1
system_capacity = 2
buffer_capacity = 3
Q_sym_1123 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p21, p31, p02, p12, p22, p32 = sym.symbols(
"p00, p01, p11, p21, p31, p02, p12, p22, p32"
)
pi_1123 = sym.Matrix([p00, p01, p11, p21, p31, p02, p12, p22, p32])
dimension_1123 = Q_sym_1123.shape[0]
M_sym_1123 = sym.Matrix(
[Q_sym_1123.transpose()[:-1, :], sym.ones(1, dimension_1123)]
)
sym_diff_equations_1123 = M_sym_1123 @ pi_1123
b_sym_1123 = sym.Matrix([sym.zeros(dimension_1123 - 1, 1), [1]])
eq0_1123 = sym.Eq(sym_diff_equations_1123[0], b_sym_1123[0])
eq1_1123 = sym.Eq(sym_diff_equations_1123[1], b_sym_1123[1])
eq2_1123 = sym.Eq(sym_diff_equations_1123[2], b_sym_1123[2])
eq3_1123 = sym.Eq(sym_diff_equations_1123[3], b_sym_1123[3])
eq4_1123 = sym.Eq(sym_diff_equations_1123[4], b_sym_1123[4])
eq5_1123 = sym.Eq(sym_diff_equations_1123[5], b_sym_1123[5])
eq6_1123 = sym.Eq(sym_diff_equations_1123[6], b_sym_1123[6])
eq7_1123 = sym.Eq(sym_diff_equations_1123[7], b_sym_1123[7])
eq8_1123 = sym.Eq(sym_diff_equations_1123[8], b_sym_1123[8])
sym_state_probs_1123 = sym.solve(
[
eq0_1123,
eq1_1123,
eq2_1123,
eq3_1123,
eq4_1123,
eq5_1123,
eq6_1123,
eq7_1123,
eq8_1123,
],
(p00, p01, p11, p21, p31, p02, p12, p22, p32),
)
sym_state_recursive_ratios_1123 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1123[0, 0] = 1
sym_state_recursive_ratios_1123[0, 1] = sym.factor(
sym_state_probs_1123[p01] / sym_state_probs_1123[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1123[1, 1] = sym.factor(
sym_state_probs_1123[p11] / sym_state_probs_1123[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1123[2, 1] = sym.factor(
sym_state_probs_1123[p21] / sym_state_probs_1123[p11]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1123[3, 1] = sym.factor(
sym_state_probs_1123[p31] / sym_state_probs_1123[p21]
) # (2,1) -> (3,1)
sym_state_recursive_ratios_1123[0, 2] = sym.factor(
sym_state_probs_1123[p02] / sym_state_probs_1123[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p12]
) # (1,2) -> (2,2)
    sym_state_recursive_ratios_1123[3, 2] = sym.factor(
sym_state_probs_1123[p32] / sym_state_probs_1123[p22]
) # (2,2) -> (3,2)
sym_state_recursive_ratios_right_1123 = sym_state_recursive_ratios_1123.copy()
sym_state_recursive_ratios_right_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p21]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_right_1123[3, 2] = sym.factor(
        sym_state_probs_1123[p32] / sym_state_probs_1123[p31]
    ) # (3,1) -> (3,2)
sym_state_recursive_ratios_P0_1123 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1123[0, 0] = 1
sym_state_recursive_ratios_P0_1123[0, 1] = sym.factor(
sym_state_probs_1123[p01] / sym_state_probs_1123[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1123[1, 1] = sym.factor(
sym_state_probs_1123[p11] / sym_state_probs_1123[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1123[2, 1] = sym.factor(
sym_state_probs_1123[p21] / sym_state_probs_1123[p00]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1123[3, 1] = sym.factor(
sym_state_probs_1123[p31] / sym_state_probs_1123[p00]
) # (0,0) -> (3,1)
sym_state_recursive_ratios_P0_1123[0, 2] = sym.factor(
sym_state_probs_1123[p02] / sym_state_probs_1123[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1123[1, 2] = sym.factor(
sym_state_probs_1123[p12] / sym_state_probs_1123[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1123[2, 2] = sym.factor(
sym_state_probs_1123[p22] / sym_state_probs_1123[p00]
) # (0,0) -> (2,2)
sym_state_recursive_ratios_P0_1123[3, 2] = sym.factor(
sym_state_probs_1123[p32] / sym_state_probs_1123[p00]
) # (0,0) -> (3,2)
return (
sym_state_probs_1123,
sym_state_recursive_ratios_1123,
sym_state_recursive_ratios_right_1123,
sym_state_recursive_ratios_P0_1123,
)
def get_symbolic_state_probabilities_1341():
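    """Symbolic state probabilities for the 1341 model (1 server,
    threshold 3, system capacity 4, buffer capacity 1), written out in
    closed form and normalised by their sum.
    """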
# num_of_servers = 1
threshold = 3
system_capacity = 4
buffer_capacity = 1
all_states_1341 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1341 = [0 for _ in range(len(all_states_1341))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
sym_state_probs_1341[0] = (sym_lambda_2) * (sym_mu**5) + (sym_mu**6) # (0,0)
sym_state_probs_1341[1] = sym_Lambda * sym_lambda_2 * (sym_mu**4) + sym_Lambda * (
sym_mu**5
) # (0,1)
sym_state_probs_1341[2] = (sym_Lambda**2) * sym_lambda_2 * (sym_mu**3) + (
sym_Lambda**2
) * (
sym_mu**4
) # (0,2)
sym_state_probs_1341[3] = (sym_Lambda**3) * sym_lambda_2 * (sym_mu**2) + (
sym_Lambda**3
) * (
sym_mu**3
) # (0,3)
sym_state_probs_1341[4] = (
(sym_Lambda**3) * sym_lambda_1 * sym_lambda_2 * sym_mu
+ (sym_Lambda**3) * sym_lambda_2 * (sym_mu**2)
        + (sym_Lambda**3) * (sym_lambda_2**2) * sym_mu
) # (1,3)
sym_state_probs_1341[5] = (sym_Lambda**3) * sym_lambda_1 * (sym_mu**2) # (0,4)
sym_state_probs_1341[6] = (
(sym_Lambda**3) * (sym_lambda_1**2) * sym_lambda_2
+ (sym_Lambda**3) * sym_lambda_1 * (sym_lambda_2**2)
+ 2 * (sym_Lambda**3) * sym_lambda_1 * sym_lambda_2 * sym_mu
) # (1,4)
total_1341 = np.sum(sym_state_probs_1341)
    sym_state_probs_1341 = [prob / total_1341 for prob in sym_state_probs_1341]
sym_state_recursive_ratios_1341 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1341[0, 0] = 1
sym_state_recursive_ratios_1341[0, 1] = sym.factor(
sym_state_probs_1341[1] / sym_state_probs_1341[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1341[0, 2] = sym.factor(
sym_state_probs_1341[2] / sym_state_probs_1341[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1341[0, 3] = sym.factor(
sym_state_probs_1341[3] / sym_state_probs_1341[2]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1341[0, 4] = sym.factor(
sym_state_probs_1341[5] / sym_state_probs_1341[3]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1341[1, 3] = sym.factor(
sym_state_probs_1341[4] / sym_state_probs_1341[3]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[5]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_right_1341 = sym_state_recursive_ratios_1341.copy()
sym_state_recursive_ratios_right_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[4]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_P0_1341 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1341[0, 0] = 1
sym_state_recursive_ratios_P0_1341[0, 1] = sym.factor(
sym_state_probs_1341[1] / sym_state_probs_1341[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1341[0, 2] = sym.factor(
sym_state_probs_1341[2] / sym_state_probs_1341[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1341[0, 3] = sym.factor(
sym_state_probs_1341[3] / sym_state_probs_1341[0]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1341[1, 3] = sym.factor(
sym_state_probs_1341[4] / sym_state_probs_1341[0]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1341[0, 4] = sym.factor(
sym_state_probs_1341[5] / sym_state_probs_1341[0]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1341[1, 4] = sym.factor(
sym_state_probs_1341[6] / sym_state_probs_1341[0]
) # (0,0) -> (1,4)
return (
sym_state_probs_1341,
sym_state_recursive_ratios_1341,
sym_state_recursive_ratios_right_1341,
sym_state_recursive_ratios_P0_1341,
)
def get_symbolic_state_probabilities_1131():
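    """Symbolic state probabilities for the 1131 model (1 server,
    threshold 1, system capacity 3, buffer capacity 1), written out in
    closed form and divided by an explicit common denominator.
    """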
# num_of_servers = 1
threshold = 1
system_capacity = 3
buffer_capacity = 1
all_states_1131 = abg.markov.build_states(
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
sym_state_probs_1131 = [0 for _ in range(len(all_states_1131))]
sym_Lambda = sym.symbols("Lambda")
sym_lambda_1 = sym.symbols("lambda_1")
sym_lambda_2 = sym.symbols("lambda_2")
sym_mu = sym.symbols("mu")
# (0,0)
sym_state_probs_1131[0] = (
(sym_mu**6)
+ 2 * (sym_lambda_2 * (sym_mu**5))
+ ((sym_lambda_2**2) * (sym_mu**4))
+ (sym_lambda_1 * sym_lambda_2 * (sym_mu**4))
)
# (0,1)
sym_state_probs_1131[1] = sym_state_probs_1131[0] * sym_Lambda / sym_mu
# (1,1)
sym_state_probs_1131[2] = (
(sym_Lambda * (sym_lambda_1**2) * sym_lambda_2 * (sym_mu**2))
+ (sym_Lambda * sym_lambda_2 * sym_lambda_1 * (sym_mu**3))
+ 2 * (sym_Lambda * sym_lambda_1 * (sym_lambda_2**2) * (sym_mu**2))
+ 2 * (sym_Lambda * (sym_lambda_2**2) * (sym_mu**3))
+ (sym_Lambda * (sym_lambda_2**3) * (sym_mu**2))
+ (sym_Lambda * sym_lambda_2 * (sym_mu**4))
)
# (0,2)
sym_state_probs_1131[3] = (
sym_Lambda * sym_lambda_1 * sym_mu**3 * (sym_lambda_2 + sym_mu)
)
# (1,2)
sym_state_probs_1131[4] = (sym_Lambda * sym_lambda_2 * sym_lambda_1 * sym_mu) * (
(sym_lambda_2**2)
+ 2 * sym_lambda_2 * sym_lambda_1
+ 3 * sym_lambda_2 * sym_mu
+ (sym_lambda_1**2)
+ 2 * sym_lambda_1 * sym_mu
+ 2 * (sym_mu**2)
)
# (0,3)
sym_state_probs_1131[5] = sym_Lambda * (sym_lambda_1**2) * (sym_mu**3)
# (1,3)
sym_state_probs_1131[6] = (sym_Lambda * sym_lambda_2 * (sym_lambda_1**2)) * (
(sym_lambda_2**2)
+ 2 * sym_lambda_2 * sym_lambda_1
+ 3 * sym_lambda_2 * sym_mu
+ (sym_lambda_1**2)
+ 2 * sym_lambda_1 * sym_mu
+ 3 * (sym_mu**2)
)
denominator = (
sym_Lambda * sym_lambda_2**3 * sym_lambda_1**2
+ sym_Lambda * sym_lambda_2**3 * sym_lambda_1 * sym_mu
+ sym_Lambda * sym_lambda_2**3 * sym_mu**2
+ 2 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1**3
+ 5 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1**2 * sym_mu
+ 5 * sym_Lambda * sym_lambda_2**2 * sym_lambda_1 * sym_mu**2
+ 3 * sym_Lambda * sym_lambda_2**2 * sym_mu**3
+ sym_Lambda * sym_lambda_2 * sym_lambda_1**4
+ 3 * sym_Lambda * sym_lambda_2 * sym_lambda_1**3 * sym_mu
+ 6 * sym_Lambda * sym_lambda_2 * sym_lambda_1**2 * sym_mu**2
+ 5 * sym_Lambda * sym_lambda_2 * sym_lambda_1 * sym_mu**3
+ 3 * sym_Lambda * sym_lambda_2 * sym_mu**4
+ sym_Lambda * sym_lambda_1**2 * sym_mu**3
+ sym_Lambda * sym_lambda_1 * sym_mu**4
+ sym_Lambda * sym_mu**5
+ sym_lambda_2**2 * sym_mu**4
+ sym_lambda_2 * sym_lambda_1 * sym_mu**4
+ 2 * sym_lambda_2 * sym_mu**5
+ sym_mu**6
)
    sym_state_probs_1131 = [prob / denominator for prob in sym_state_probs_1131]
sym_state_recursive_ratios_1131 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1131[0, 0] = 1
sym_state_recursive_ratios_1131[0, 1] = sym.factor(
sym_state_probs_1131[1] / sym_state_probs_1131[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1131[1, 1] = sym.factor(
sym_state_probs_1131[2] / sym_state_probs_1131[1]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1131[0, 2] = sym.factor(
sym_state_probs_1131[3] / sym_state_probs_1131[1]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[3]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1131[0, 3] = sym.factor(
sym_state_probs_1131[5] / sym_state_probs_1131[3]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[5]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_right_1131 = sym_state_recursive_ratios_1131.copy()
sym_state_recursive_ratios_right_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[2]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[4]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_P0_1131 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1131[0, 0] = 1
sym_state_recursive_ratios_P0_1131[0, 1] = sym.factor(
sym_state_probs_1131[1] / sym_state_probs_1131[0]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1131[1, 1] = sym.factor(
sym_state_probs_1131[2] / sym_state_probs_1131[0]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1131[0, 2] = sym.factor(
sym_state_probs_1131[3] / sym_state_probs_1131[0]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1131[1, 2] = sym.factor(
sym_state_probs_1131[4] / sym_state_probs_1131[0]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1131[0, 3] = sym.factor(
sym_state_probs_1131[5] / sym_state_probs_1131[0]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1131[1, 3] = sym.factor(
sym_state_probs_1131[6] / sym_state_probs_1131[0]
) # (0,0) -> (1,3)
return (
sym_state_probs_1131,
sym_state_recursive_ratios_1131,
sym_state_recursive_ratios_right_1131,
sym_state_recursive_ratios_P0_1131,
)
def get_symbolic_state_probabilities_1132():
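    """Symbolic state probabilities for the 1132 model (1 server,
    threshold 1, system capacity 3, buffer capacity 2), obtained by
    solving the balance equations with sym.solve.
    """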
num_of_servers = 1
threshold = 1
system_capacity = 3
buffer_capacity = 2
Q_sym_1132 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p21, p02, p12, p22, p03, p13, p23 = sym.symbols(
"p00, p01, p11, p21, p02, p12, p22, p03, p13, p23"
)
pi_1132 = sym.Matrix([p00, p01, p11, p21, p02, p12, p22, p03, p13, p23])
dimension_1132 = Q_sym_1132.shape[0]
M_sym_1132 = sym.Matrix(
[Q_sym_1132.transpose()[:-1, :], sym.ones(1, dimension_1132)]
)
sym_diff_equations_1132 = M_sym_1132 @ pi_1132
b_sym_1132 = sym.Matrix([sym.zeros(dimension_1132 - 1, 1), [1]])
eq0_1132 = sym.Eq(sym_diff_equations_1132[0], b_sym_1132[0])
eq1_1132 = sym.Eq(sym_diff_equations_1132[1], b_sym_1132[1])
eq2_1132 = sym.Eq(sym_diff_equations_1132[2], b_sym_1132[2])
eq3_1132 = sym.Eq(sym_diff_equations_1132[3], b_sym_1132[3])
eq4_1132 = sym.Eq(sym_diff_equations_1132[4], b_sym_1132[4])
eq5_1132 = sym.Eq(sym_diff_equations_1132[5], b_sym_1132[5])
eq6_1132 = sym.Eq(sym_diff_equations_1132[6], b_sym_1132[6])
eq7_1132 = sym.Eq(sym_diff_equations_1132[7], b_sym_1132[7])
eq8_1132 = sym.Eq(sym_diff_equations_1132[8], b_sym_1132[8])
eq9_1132 = sym.Eq(sym_diff_equations_1132[9], b_sym_1132[9])
sym_state_probs_1132 = sym.solve(
[
eq0_1132,
eq1_1132,
eq2_1132,
eq3_1132,
eq4_1132,
eq5_1132,
eq6_1132,
eq7_1132,
eq8_1132,
eq9_1132,
],
(p00, p01, p11, p21, p02, p12, p22, p03, p13, p23),
)
sym_state_recursive_ratios_1132 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1132[0, 0] = 1
sym_state_recursive_ratios_1132[0, 1] = sym.factor(
sym_state_probs_1132[p01] / sym_state_probs_1132[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1132[1, 1] = sym.factor(
sym_state_probs_1132[p11] / sym_state_probs_1132[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1132[2, 1] = sym.factor(
sym_state_probs_1132[p21] / sym_state_probs_1132[p11]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1132[0, 2] = sym.factor(
sym_state_probs_1132[p02] / sym_state_probs_1132[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1132[1, 2] = sym.factor(
sym_state_probs_1132[p12] / sym_state_probs_1132[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1132[2, 2] = sym.factor(
sym_state_probs_1132[p22] / sym_state_probs_1132[p12]
) # (1,2) -> (2,2)
sym_state_recursive_ratios_1132[0, 3] = sym.factor(
sym_state_probs_1132[p03] / sym_state_probs_1132[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1132[1, 3] = sym.factor(
sym_state_probs_1132[p13] / sym_state_probs_1132[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1132[2, 3] = sym.factor(
sym_state_probs_1132[p23] / sym_state_probs_1132[p13]
) # (1,3) -> (2,3)
sym_state_recursive_ratios_right_1132 = sym_state_recursive_ratios_1132.copy()
sym_state_recursive_ratios_right_1132[1, 2] = sym.factor(
sym_state_probs_1132[p12] / sym_state_probs_1132[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1132[1, 3] = sym.factor(
sym_state_probs_1132[p13] / sym_state_probs_1132[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1132[2, 2] = sym.factor(
sym_state_probs_1132[p22] / sym_state_probs_1132[p21]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_right_1132[2, 3] = sym.factor(
sym_state_probs_1132[p23] / sym_state_probs_1132[p22]
) # (2,2) -> (2,3)
sym_state_recursive_ratios_P0_1132 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1132[0, 0] = 1
sym_state_recursive_ratios_P0_1132[0, 1] = sym.factor(
sym_state_probs_1132[p01] / sym_state_probs_1132[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1132[1, 1] = sym.factor(
sym_state_probs_1132[p11] / sym_state_probs_1132[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1132[2, 1] = sym.factor(
sym_state_probs_1132[p21] / sym_state_probs_1132[p00]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1132[0, 2] = sym.factor(
sym_state_probs_1132[p02] / sym_state_probs_1132[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1132[1, 2] = sym.factor(
sym_state_probs_1132[p12] / sym_state_probs_1132[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1132[2, 2] = sym.factor(
sym_state_probs_1132[p22] / sym_state_probs_1132[p00]
) # (0,0) -> (2,2)
sym_state_recursive_ratios_P0_1132[0, 3] = sym.factor(
sym_state_probs_1132[p03] / sym_state_probs_1132[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1132[1, 3] = sym.factor(
sym_state_probs_1132[p13] / sym_state_probs_1132[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1132[2, 3] = sym.factor(
sym_state_probs_1132[p23] / sym_state_probs_1132[p00]
) # (0,0) -> (2,3)
return (
sym_state_probs_1132,
sym_state_recursive_ratios_1132,
sym_state_recursive_ratios_right_1132,
sym_state_recursive_ratios_P0_1132,
)
def get_symbolic_state_probabilities_1141():
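    """Symbolic state probabilities for the 1141 model (1 server,
    threshold 1, system capacity 4, buffer capacity 1), obtained by
    solving the balance equations with sym.solve.
    """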
num_of_servers = 1
threshold = 1
system_capacity = 4
buffer_capacity = 1
Q_sym_1141 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p02, p12, p03, p13, p04, p14 = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14"
)
pi_1141 = sym.Matrix([p00, p01, p11, p02, p12, p03, p13, p04, p14])
dimension_1141 = Q_sym_1141.shape[0]
M_sym_1141 = sym.Matrix(
[Q_sym_1141.transpose()[:-1, :], sym.ones(1, dimension_1141)]
)
sym_diff_equations_1141 = M_sym_1141 @ pi_1141
b_sym_1141 = sym.Matrix([sym.zeros(dimension_1141 - 1, 1), [1]])
eq0_1141 = sym.Eq(sym_diff_equations_1141[0], b_sym_1141[0])
eq1_1141 = sym.Eq(sym_diff_equations_1141[1], b_sym_1141[1])
eq2_1141 = sym.Eq(sym_diff_equations_1141[2], b_sym_1141[2])
eq3_1141 = sym.Eq(sym_diff_equations_1141[3], b_sym_1141[3])
eq4_1141 = sym.Eq(sym_diff_equations_1141[4], b_sym_1141[4])
eq5_1141 = sym.Eq(sym_diff_equations_1141[5], b_sym_1141[5])
eq6_1141 = sym.Eq(sym_diff_equations_1141[6], b_sym_1141[6])
eq7_1141 = sym.Eq(sym_diff_equations_1141[7], b_sym_1141[7])
eq8_1141 = sym.Eq(sym_diff_equations_1141[8], b_sym_1141[8])
sym_state_probs_1141 = sym.solve(
[
eq0_1141,
eq1_1141,
eq2_1141,
eq3_1141,
eq4_1141,
eq5_1141,
eq6_1141,
eq7_1141,
eq8_1141,
],
(p00, p01, p11, p02, p12, p03, p13, p04, p14),
)
sym_state_recursive_ratios_1141 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1141[0, 0] = 1
sym_state_recursive_ratios_1141[0, 1] = sym.factor(
sym_state_probs_1141[p01] / sym_state_probs_1141[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1141[1, 1] = sym.factor(
sym_state_probs_1141[p11] / sym_state_probs_1141[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1141[0, 2] = sym.factor(
sym_state_probs_1141[p02] / sym_state_probs_1141[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1141[1, 2] = sym.factor(
sym_state_probs_1141[p12] / sym_state_probs_1141[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1141[0, 3] = sym.factor(
sym_state_probs_1141[p03] / sym_state_probs_1141[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1141[1, 3] = sym.factor(
sym_state_probs_1141[p13] / sym_state_probs_1141[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1141[0, 4] = sym.factor(
sym_state_probs_1141[p04] / sym_state_probs_1141[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1141[1, 4] = sym.factor(
sym_state_probs_1141[p14] / sym_state_probs_1141[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_right_1141 = sym_state_recursive_ratios_1141.copy()
sym_state_recursive_ratios_right_1141[1, 2] = sym.factor(
sym_state_probs_1141[p12] / sym_state_probs_1141[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1141[1, 3] = sym.factor(
sym_state_probs_1141[p13] / sym_state_probs_1141[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1141[1, 4] = sym.factor(
sym_state_probs_1141[p14] / sym_state_probs_1141[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_P0_1141 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1141[0, 0] = 1
sym_state_recursive_ratios_P0_1141[0, 1] = sym.factor(
sym_state_probs_1141[p01] / sym_state_probs_1141[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1141[1, 1] = sym.factor(
sym_state_probs_1141[p11] / sym_state_probs_1141[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1141[0, 2] = sym.factor(
sym_state_probs_1141[p02] / sym_state_probs_1141[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1141[1, 2] = sym.factor(
sym_state_probs_1141[p12] / sym_state_probs_1141[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1141[0, 3] = sym.factor(
sym_state_probs_1141[p03] / sym_state_probs_1141[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1141[1, 3] = sym.factor(
sym_state_probs_1141[p13] / sym_state_probs_1141[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1141[0, 4] = sym.factor(
sym_state_probs_1141[p04] / sym_state_probs_1141[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1141[1, 4] = sym.factor(
sym_state_probs_1141[p14] / sym_state_probs_1141[p00]
) # (0,0) -> (1,4)
return (
sym_state_probs_1141,
sym_state_recursive_ratios_1141,
sym_state_recursive_ratios_right_1141,
sym_state_recursive_ratios_P0_1141,
)
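# The eq0 ... eqN boilerplate above can be generated in a loop. A hedged
# sketch under the same assumptions (`sym` is the SymPy import used in this
# file; M_sym, pi and b_sym are built exactly as in the functions above):
def _sketch_balance_equations(M_sym, pi, b_sym):
    """Build the list of sym.Eq objects for the linear system M @ pi = b
    without enumerating each equation by hand."""
    lhs = M_sym @ pi
    return [sym.Eq(lhs[i], b_sym[i]) for i in range(lhs.shape[0])]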
def get_symbolic_state_probabilities_1142():
num_of_servers = 1
threshold = 1
system_capacity = 4
buffer_capacity = 2
Q_sym_1142 = abg.markov.get_symbolic_transition_matrix(
num_of_servers=num_of_servers,
threshold=threshold,
system_capacity=system_capacity,
buffer_capacity=buffer_capacity,
)
p00, p01, p11, p21, p02, p12, p22, p03, p13, p23, p04, p14, p24 = sym.symbols(
"p00, p01, p11, p21, p02, p12, p22, p03, p13, p23, p04, p14, p24"
)
pi_1142 = sym.Matrix(
[p00, p01, p11, p21, p02, p12, p22, p03, p13, p23, p04, p14, p24]
)
dimension_1142 = Q_sym_1142.shape[0]
M_sym_1142 = sym.Matrix(
[Q_sym_1142.transpose()[:-1, :], sym.ones(1, dimension_1142)]
)
sym_diff_equations_1142 = M_sym_1142 @ pi_1142
b_sym_1142 = sym.Matrix([sym.zeros(dimension_1142 - 1, 1), [1]])
eq0_1142 = sym.Eq(sym_diff_equations_1142[0], b_sym_1142[0])
eq1_1142 = sym.Eq(sym_diff_equations_1142[1], b_sym_1142[1])
eq2_1142 = sym.Eq(sym_diff_equations_1142[2], b_sym_1142[2])
eq3_1142 = sym.Eq(sym_diff_equations_1142[3], b_sym_1142[3])
eq4_1142 = sym.Eq(sym_diff_equations_1142[4], b_sym_1142[4])
eq5_1142 = sym.Eq(sym_diff_equations_1142[5], b_sym_1142[5])
eq6_1142 = sym.Eq(sym_diff_equations_1142[6], b_sym_1142[6])
eq7_1142 = sym.Eq(sym_diff_equations_1142[7], b_sym_1142[7])
eq8_1142 = sym.Eq(sym_diff_equations_1142[8], b_sym_1142[8])
eq9_1142 = sym.Eq(sym_diff_equations_1142[9], b_sym_1142[9])
eq10_1142 = sym.Eq(sym_diff_equations_1142[10], b_sym_1142[10])
eq11_1142 = sym.Eq(sym_diff_equations_1142[11], b_sym_1142[11])
eq12_1142 = sym.Eq(sym_diff_equations_1142[12], b_sym_1142[12])
sym_state_probs_1142 = sym.solve(
[
eq0_1142,
eq1_1142,
eq2_1142,
eq3_1142,
eq4_1142,
eq5_1142,
eq6_1142,
eq7_1142,
eq8_1142,
eq9_1142,
eq10_1142,
eq11_1142,
eq12_1142,
],
(p00, p01, p11, p21, p02, p12, p22, p03, p13, p23, p04, p14, p24),
)
sym_state_recursive_ratios_1142 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1142[0, 0] = 1
sym_state_recursive_ratios_1142[0, 1] = sym.factor(
sym_state_probs_1142[p01] / sym_state_probs_1142[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1142[1, 1] = sym.factor(
sym_state_probs_1142[p11] / sym_state_probs_1142[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1142[2, 1] = sym.factor(
sym_state_probs_1142[p21] / sym_state_probs_1142[p11]
) # (1,1) -> (2,1)
sym_state_recursive_ratios_1142[0, 2] = sym.factor(
sym_state_probs_1142[p02] / sym_state_probs_1142[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1142[1, 2] = sym.factor(
sym_state_probs_1142[p12] / sym_state_probs_1142[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1142[2, 2] = sym.factor(
sym_state_probs_1142[p22] / sym_state_probs_1142[p12]
) # (1,2) -> (2,2)
sym_state_recursive_ratios_1142[0, 3] = sym.factor(
sym_state_probs_1142[p03] / sym_state_probs_1142[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1142[1, 3] = sym.factor(
sym_state_probs_1142[p13] / sym_state_probs_1142[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1142[2, 3] = sym.factor(
sym_state_probs_1142[p23] / sym_state_probs_1142[p13]
) # (1,3) -> (2,3)
sym_state_recursive_ratios_1142[0, 4] = sym.factor(
sym_state_probs_1142[p04] / sym_state_probs_1142[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1142[1, 4] = sym.factor(
sym_state_probs_1142[p14] / sym_state_probs_1142[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1142[2, 4] = sym.factor(
sym_state_probs_1142[p24] / sym_state_probs_1142[p14]
) # (1,4) -> (2,4)
sym_state_recursive_ratios_right_1142 = sym_state_recursive_ratios_1142.copy()
sym_state_recursive_ratios_right_1142[1, 2] = sym.factor(
sym_state_probs_1142[p12] / sym_state_probs_1142[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1142[1, 3] = sym.factor(
sym_state_probs_1142[p13] / sym_state_probs_1142[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1142[1, 4] = sym.factor(
sym_state_probs_1142[p14] / sym_state_probs_1142[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1142[2, 2] = sym.factor(
sym_state_probs_1142[p22] / sym_state_probs_1142[p21]
) # (2,1) -> (2,2)
sym_state_recursive_ratios_right_1142[2, 3] = sym.factor(
sym_state_probs_1142[p23] / sym_state_probs_1142[p22]
) # (2,2) -> (2,3)
sym_state_recursive_ratios_right_1142[2, 4] = sym.factor(
sym_state_probs_1142[p24] / sym_state_probs_1142[p23]
) # (2,3) -> (2,4)
sym_state_recursive_ratios_P0_1142 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1142[0, 0] = 1
sym_state_recursive_ratios_P0_1142[0, 1] = sym.factor(
sym_state_probs_1142[p01] / sym_state_probs_1142[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1142[1, 1] = sym.factor(
sym_state_probs_1142[p11] / sym_state_probs_1142[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1142[2, 1] = sym.factor(
sym_state_probs_1142[p21] / sym_state_probs_1142[p00]
) # (0,0) -> (2,1)
sym_state_recursive_ratios_P0_1142[0, 2] = sym.factor(
sym_state_probs_1142[p02] / sym_state_probs_1142[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1142[1, 2] = sym.factor(
sym_state_probs_1142[p12] / sym_state_probs_1142[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1142[2, 2] = sym.factor(
sym_state_probs_1142[p22] / sym_state_probs_1142[p00]
) # (0,0) -> (2,2)
sym_state_recursive_ratios_P0_1142[0, 3] = sym.factor(
sym_state_probs_1142[p03] / sym_state_probs_1142[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1142[1, 3] = sym.factor(
sym_state_probs_1142[p13] / sym_state_probs_1142[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1142[2, 3] = sym.factor(
sym_state_probs_1142[p23] / sym_state_probs_1142[p00]
) # (0,0) -> (2,3)
sym_state_recursive_ratios_P0_1142[0, 4] = sym.factor(
sym_state_probs_1142[p04] / sym_state_probs_1142[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1142[1, 4] = sym.factor(
sym_state_probs_1142[p14] / sym_state_probs_1142[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1142[2, 4] = sym.factor(
sym_state_probs_1142[p24] / sym_state_probs_1142[p00]
) # (0,0) -> (2,4)
return (
sym_state_probs_1142,
sym_state_recursive_ratios_1142,
sym_state_recursive_ratios_right_1142,
sym_state_recursive_ratios_P0_1142,
)
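# The *_P0 matrices above all follow one rule: every entry is the state
# probability divided by the probability of state (0,0). A hedged sketch of
# that pattern; `state_symbols` (a dict mapping (row, col) grid positions to
# the corresponding SymPy symbols) is a hypothetical helper structure, not
# something the functions above construct.
def _sketch_ratios_P0(sym_state_probs, state_symbols, p00,
                      buffer_capacity, system_capacity):
    """Fill a (buffer_capacity + 1) x (system_capacity + 1) matrix with
    ratios of each state probability to the (0,0) probability."""
    ratios = sym.zeros(buffer_capacity + 1, system_capacity + 1)
    ratios[0, 0] = 1
    for (row, col), symbol in state_symbols.items():
        if (row, col) != (0, 0):
            ratios[row, col] = sym.factor(
                sym_state_probs[symbol] / sym_state_probs[p00]
            )
    return ratios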
def get_symbolic_state_probabilities_1151():
num_of_servers = 1
threshold = 1
system_capacity = 5
buffer_capacity = 1
Q_sym_1151 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15 = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15"
)
pi_1151 = sym.Matrix([p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15])
dimension_1151 = Q_sym_1151.shape[0]
M_sym_1151 = sym.Matrix(
[Q_sym_1151.transpose()[:-1, :], sym.ones(1, dimension_1151)]
)
sym_diff_equations_1151 = M_sym_1151 @ pi_1151
b_sym_1151 = sym.Matrix([sym.zeros(dimension_1151 - 1, 1), [1]])
eq0_1151 = sym.Eq(sym_diff_equations_1151[0], b_sym_1151[0])
eq1_1151 = sym.Eq(sym_diff_equations_1151[1], b_sym_1151[1])
eq2_1151 = sym.Eq(sym_diff_equations_1151[2], b_sym_1151[2])
eq3_1151 = sym.Eq(sym_diff_equations_1151[3], b_sym_1151[3])
eq4_1151 = sym.Eq(sym_diff_equations_1151[4], b_sym_1151[4])
eq5_1151 = sym.Eq(sym_diff_equations_1151[5], b_sym_1151[5])
eq6_1151 = sym.Eq(sym_diff_equations_1151[6], b_sym_1151[6])
eq7_1151 = sym.Eq(sym_diff_equations_1151[7], b_sym_1151[7])
eq8_1151 = sym.Eq(sym_diff_equations_1151[8], b_sym_1151[8])
eq9_1151 = sym.Eq(sym_diff_equations_1151[9], b_sym_1151[9])
eq10_1151 = sym.Eq(sym_diff_equations_1151[10], b_sym_1151[10])
sym_state_probs_1151 = sym.solve(
[
eq0_1151,
eq1_1151,
eq2_1151,
eq3_1151,
eq4_1151,
eq5_1151,
eq6_1151,
eq7_1151,
eq8_1151,
eq9_1151,
eq10_1151,
],
(p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15),
)
sym_state_recursive_ratios_1151 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1151[0, 0] = 1
sym_state_recursive_ratios_1151[0, 1] = sym.factor(
sym_state_probs_1151[p01] / sym_state_probs_1151[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1151[1, 1] = sym.factor(
sym_state_probs_1151[p11] / sym_state_probs_1151[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1151[0, 2] = sym.factor(
sym_state_probs_1151[p02] / sym_state_probs_1151[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1151[1, 2] = sym.factor(
sym_state_probs_1151[p12] / sym_state_probs_1151[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1151[0, 3] = sym.factor(
sym_state_probs_1151[p03] / sym_state_probs_1151[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1151[1, 3] = sym.factor(
sym_state_probs_1151[p13] / sym_state_probs_1151[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1151[0, 4] = sym.factor(
sym_state_probs_1151[p04] / sym_state_probs_1151[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1151[1, 4] = sym.factor(
sym_state_probs_1151[p14] / sym_state_probs_1151[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1151[0, 5] = sym.factor(
sym_state_probs_1151[p05] / sym_state_probs_1151[p04]
) # (0,4) -> (0,5)
sym_state_recursive_ratios_1151[1, 5] = sym.factor(
sym_state_probs_1151[p15] / sym_state_probs_1151[p05]
) # (0,5) -> (1,5)
sym_state_recursive_ratios_right_1151 = sym_state_recursive_ratios_1151.copy()
sym_state_recursive_ratios_right_1151[1, 2] = sym.factor(
sym_state_probs_1151[p12] / sym_state_probs_1151[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1151[1, 3] = sym.factor(
sym_state_probs_1151[p13] / sym_state_probs_1151[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1151[1, 4] = sym.factor(
sym_state_probs_1151[p14] / sym_state_probs_1151[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1151[1, 5] = sym.factor(
sym_state_probs_1151[p15] / sym_state_probs_1151[p14]
) # (1,4) -> (1,5)
sym_state_recursive_ratios_P0_1151 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1151[0, 0] = 1
sym_state_recursive_ratios_P0_1151[0, 1] = sym.factor(
sym_state_probs_1151[p01] / sym_state_probs_1151[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1151[1, 1] = sym.factor(
sym_state_probs_1151[p11] / sym_state_probs_1151[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1151[0, 2] = sym.factor(
sym_state_probs_1151[p02] / sym_state_probs_1151[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1151[1, 2] = sym.factor(
sym_state_probs_1151[p12] / sym_state_probs_1151[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1151[0, 3] = sym.factor(
sym_state_probs_1151[p03] / sym_state_probs_1151[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1151[1, 3] = sym.factor(
sym_state_probs_1151[p13] / sym_state_probs_1151[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1151[0, 4] = sym.factor(
sym_state_probs_1151[p04] / sym_state_probs_1151[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1151[1, 4] = sym.factor(
sym_state_probs_1151[p14] / sym_state_probs_1151[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1151[0, 5] = sym.factor(
sym_state_probs_1151[p05] / sym_state_probs_1151[p00]
) # (0,0) -> (0,5)
sym_state_recursive_ratios_P0_1151[1, 5] = sym.factor(
sym_state_probs_1151[p15] / sym_state_probs_1151[p00]
) # (0,0) -> (1,5)
return (
sym_state_probs_1151,
sym_state_recursive_ratios_1151,
sym_state_recursive_ratios_right_1151,
sym_state_recursive_ratios_P0_1151,
)
def get_symbolic_state_probabilities_1161():
num_of_servers = 1
threshold = 1
system_capacity = 6
buffer_capacity = 1
Q_sym_1161 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16 = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16"
)
pi_1161 = sym.Matrix(
[p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16]
)
dimension_1161 = Q_sym_1161.shape[0]
M_sym_1161 = sym.Matrix(
[Q_sym_1161.transpose()[:-1, :], sym.ones(1, dimension_1161)]
)
sym_diff_equations_1161 = M_sym_1161 @ pi_1161
b_sym_1161 = sym.Matrix([sym.zeros(dimension_1161 - 1, 1), [1]])
eq0_1161 = sym.Eq(sym_diff_equations_1161[0], b_sym_1161[0])
eq1_1161 = sym.Eq(sym_diff_equations_1161[1], b_sym_1161[1])
eq2_1161 = sym.Eq(sym_diff_equations_1161[2], b_sym_1161[2])
eq3_1161 = sym.Eq(sym_diff_equations_1161[3], b_sym_1161[3])
eq4_1161 = sym.Eq(sym_diff_equations_1161[4], b_sym_1161[4])
eq5_1161 = sym.Eq(sym_diff_equations_1161[5], b_sym_1161[5])
eq6_1161 = sym.Eq(sym_diff_equations_1161[6], b_sym_1161[6])
eq7_1161 = sym.Eq(sym_diff_equations_1161[7], b_sym_1161[7])
eq8_1161 = sym.Eq(sym_diff_equations_1161[8], b_sym_1161[8])
eq9_1161 = sym.Eq(sym_diff_equations_1161[9], b_sym_1161[9])
eq10_1161 = sym.Eq(sym_diff_equations_1161[10], b_sym_1161[10])
eq11_1161 = sym.Eq(sym_diff_equations_1161[11], b_sym_1161[11])
eq12_1161 = sym.Eq(sym_diff_equations_1161[12], b_sym_1161[12])
sym_state_probs_1161 = sym.solve(
[
eq0_1161,
eq1_1161,
eq2_1161,
eq3_1161,
eq4_1161,
eq5_1161,
eq6_1161,
eq7_1161,
eq8_1161,
eq9_1161,
eq10_1161,
eq11_1161,
eq12_1161,
],
(p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16),
)
sym_state_recursive_ratios_1161 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1161[0, 0] = 1
sym_state_recursive_ratios_1161[0, 1] = sym.factor(
sym_state_probs_1161[p01] / sym_state_probs_1161[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1161[1, 1] = sym.factor(
sym_state_probs_1161[p11] / sym_state_probs_1161[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1161[0, 2] = sym.factor(
sym_state_probs_1161[p02] / sym_state_probs_1161[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1161[1, 2] = sym.factor(
sym_state_probs_1161[p12] / sym_state_probs_1161[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1161[0, 3] = sym.factor(
sym_state_probs_1161[p03] / sym_state_probs_1161[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1161[1, 3] = sym.factor(
sym_state_probs_1161[p13] / sym_state_probs_1161[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1161[0, 4] = sym.factor(
sym_state_probs_1161[p04] / sym_state_probs_1161[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1161[1, 4] = sym.factor(
sym_state_probs_1161[p14] / sym_state_probs_1161[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1161[0, 5] = sym.factor(
sym_state_probs_1161[p05] / sym_state_probs_1161[p04]
) # (0,4) -> (0,5)
sym_state_recursive_ratios_1161[1, 5] = sym.factor(
sym_state_probs_1161[p15] / sym_state_probs_1161[p05]
) # (0,5) -> (1,5)
sym_state_recursive_ratios_1161[0, 6] = sym.factor(
sym_state_probs_1161[p06] / sym_state_probs_1161[p05]
) # (0,5) -> (0,6)
sym_state_recursive_ratios_1161[1, 6] = sym.factor(
sym_state_probs_1161[p16] / sym_state_probs_1161[p06]
) # (0,6) -> (1,6)
sym_state_recursive_ratios_right_1161 = sym_state_recursive_ratios_1161.copy()
sym_state_recursive_ratios_right_1161[1, 2] = sym.factor(
sym_state_probs_1161[p12] / sym_state_probs_1161[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1161[1, 3] = sym.factor(
sym_state_probs_1161[p13] / sym_state_probs_1161[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1161[1, 4] = sym.factor(
sym_state_probs_1161[p14] / sym_state_probs_1161[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1161[1, 5] = sym.factor(
sym_state_probs_1161[p15] / sym_state_probs_1161[p14]
) # (1,4) -> (1,5)
sym_state_recursive_ratios_right_1161[1, 6] = sym.factor(
sym_state_probs_1161[p16] / sym_state_probs_1161[p15]
) # (1,5) -> (1,6)
sym_state_recursive_ratios_P0_1161 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1161[0, 0] = 1
sym_state_recursive_ratios_P0_1161[0, 1] = sym.factor(
sym_state_probs_1161[p01] / sym_state_probs_1161[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1161[1, 1] = sym.factor(
sym_state_probs_1161[p11] / sym_state_probs_1161[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1161[0, 2] = sym.factor(
sym_state_probs_1161[p02] / sym_state_probs_1161[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1161[1, 2] = sym.factor(
sym_state_probs_1161[p12] / sym_state_probs_1161[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1161[0, 3] = sym.factor(
sym_state_probs_1161[p03] / sym_state_probs_1161[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1161[1, 3] = sym.factor(
sym_state_probs_1161[p13] / sym_state_probs_1161[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1161[0, 4] = sym.factor(
sym_state_probs_1161[p04] / sym_state_probs_1161[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1161[1, 4] = sym.factor(
sym_state_probs_1161[p14] / sym_state_probs_1161[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1161[0, 5] = sym.factor(
sym_state_probs_1161[p05] / sym_state_probs_1161[p00]
) # (0,0) -> (0,5)
sym_state_recursive_ratios_P0_1161[1, 5] = sym.factor(
sym_state_probs_1161[p15] / sym_state_probs_1161[p00]
) # (0,0) -> (1,5)
sym_state_recursive_ratios_P0_1161[0, 6] = sym.factor(
sym_state_probs_1161[p06] / sym_state_probs_1161[p00]
) # (0,0) -> (0,6)
sym_state_recursive_ratios_P0_1161[1, 6] = sym.factor(
sym_state_probs_1161[p16] / sym_state_probs_1161[p00]
) # (0,0) -> (1,6)
return (
sym_state_probs_1161,
sym_state_recursive_ratios_1161,
sym_state_recursive_ratios_right_1161,
sym_state_recursive_ratios_P0_1161,
)
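# A hedged alternative to the sym.solve call used above: since the system is
# linear, SymPy's linsolve can consume the (M, b) pair directly. Equivalent
# in outcome, shown only as a sketch.
def _sketch_solve_linear(M_sym, b_sym, state_symbols):
    """Solve M @ pi = b and return a {symbol: expression} dict shaped like
    the output of sym.solve in the functions above."""
    solution_set = sym.linsolve((M_sym, b_sym), state_symbols)
    values = next(iter(solution_set))
    return dict(zip(state_symbols, values))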
def get_symbolic_state_probabilities_1171():
num_of_servers = 1
threshold = 1
system_capacity = 7
buffer_capacity = 1
Q_sym_1171 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
(
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
) = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16, p07, p17"
)
pi_1171 = sym.Matrix(
[p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16, p07, p17]
)
dimension_1171 = Q_sym_1171.shape[0]
M_sym_1171 = sym.Matrix(
[Q_sym_1171.transpose()[:-1, :], sym.ones(1, dimension_1171)]
)
sym_diff_equations_1171 = M_sym_1171 @ pi_1171
b_sym_1171 = sym.Matrix([sym.zeros(dimension_1171 - 1, 1), [1]])
eq0_1171 = sym.Eq(sym_diff_equations_1171[0], b_sym_1171[0])
eq1_1171 = sym.Eq(sym_diff_equations_1171[1], b_sym_1171[1])
eq2_1171 = sym.Eq(sym_diff_equations_1171[2], b_sym_1171[2])
eq3_1171 = sym.Eq(sym_diff_equations_1171[3], b_sym_1171[3])
eq4_1171 = sym.Eq(sym_diff_equations_1171[4], b_sym_1171[4])
eq5_1171 = sym.Eq(sym_diff_equations_1171[5], b_sym_1171[5])
eq6_1171 = sym.Eq(sym_diff_equations_1171[6], b_sym_1171[6])
eq7_1171 = sym.Eq(sym_diff_equations_1171[7], b_sym_1171[7])
eq8_1171 = sym.Eq(sym_diff_equations_1171[8], b_sym_1171[8])
eq9_1171 = sym.Eq(sym_diff_equations_1171[9], b_sym_1171[9])
eq10_1171 = sym.Eq(sym_diff_equations_1171[10], b_sym_1171[10])
eq11_1171 = sym.Eq(sym_diff_equations_1171[11], b_sym_1171[11])
eq12_1171 = sym.Eq(sym_diff_equations_1171[12], b_sym_1171[12])
eq13_1171 = sym.Eq(sym_diff_equations_1171[13], b_sym_1171[13])
eq14_1171 = sym.Eq(sym_diff_equations_1171[14], b_sym_1171[14])
sym_state_probs_1171 = sym.solve(
[
eq0_1171,
eq1_1171,
eq2_1171,
eq3_1171,
eq4_1171,
eq5_1171,
eq6_1171,
eq7_1171,
eq8_1171,
eq9_1171,
eq10_1171,
eq11_1171,
eq12_1171,
eq13_1171,
eq14_1171,
],
(p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16, p07, p17),
)
sym_state_recursive_ratios_1171 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1171[0, 0] = 1
sym_state_recursive_ratios_1171[0, 1] = sym.factor(
sym_state_probs_1171[p01] / sym_state_probs_1171[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1171[1, 1] = sym.factor(
sym_state_probs_1171[p11] / sym_state_probs_1171[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1171[0, 2] = sym.factor(
sym_state_probs_1171[p02] / sym_state_probs_1171[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1171[1, 2] = sym.factor(
sym_state_probs_1171[p12] / sym_state_probs_1171[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1171[0, 3] = sym.factor(
sym_state_probs_1171[p03] / sym_state_probs_1171[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1171[1, 3] = sym.factor(
sym_state_probs_1171[p13] / sym_state_probs_1171[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1171[0, 4] = sym.factor(
sym_state_probs_1171[p04] / sym_state_probs_1171[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1171[1, 4] = sym.factor(
sym_state_probs_1171[p14] / sym_state_probs_1171[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1171[0, 5] = sym.factor(
sym_state_probs_1171[p05] / sym_state_probs_1171[p04]
) # (0,4) -> (0,5)
sym_state_recursive_ratios_1171[1, 5] = sym.factor(
sym_state_probs_1171[p15] / sym_state_probs_1171[p05]
) # (0,5) -> (1,5)
sym_state_recursive_ratios_1171[0, 6] = sym.factor(
sym_state_probs_1171[p06] / sym_state_probs_1171[p05]
) # (0,5) -> (0,6)
sym_state_recursive_ratios_1171[1, 6] = sym.factor(
sym_state_probs_1171[p16] / sym_state_probs_1171[p06]
) # (0,6) -> (1,6)
sym_state_recursive_ratios_1171[0, 7] = sym.factor(
sym_state_probs_1171[p07] / sym_state_probs_1171[p06]
) # (0,6) -> (0,7)
sym_state_recursive_ratios_1171[1, 7] = sym.factor(
sym_state_probs_1171[p17] / sym_state_probs_1171[p07]
) # (0,7) -> (1,7)
sym_state_recursive_ratios_right_1171 = sym_state_recursive_ratios_1171.copy()
sym_state_recursive_ratios_right_1171[1, 2] = sym.factor(
sym_state_probs_1171[p12] / sym_state_probs_1171[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1171[1, 3] = sym.factor(
sym_state_probs_1171[p13] / sym_state_probs_1171[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1171[1, 4] = sym.factor(
sym_state_probs_1171[p14] / sym_state_probs_1171[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1171[1, 5] = sym.factor(
sym_state_probs_1171[p15] / sym_state_probs_1171[p14]
) # (1,4) -> (1,5)
sym_state_recursive_ratios_right_1171[1, 6] = sym.factor(
sym_state_probs_1171[p16] / sym_state_probs_1171[p15]
) # (1,5) -> (1,6)
sym_state_recursive_ratios_right_1171[1, 7] = sym.factor(
sym_state_probs_1171[p17] / sym_state_probs_1171[p16]
) # (1,6) -> (1,7)
sym_state_recursive_ratios_P0_1171 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1171[0, 0] = 1
sym_state_recursive_ratios_P0_1171[0, 1] = sym.factor(
sym_state_probs_1171[p01] / sym_state_probs_1171[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1171[1, 1] = sym.factor(
sym_state_probs_1171[p11] / sym_state_probs_1171[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1171[0, 2] = sym.factor(
sym_state_probs_1171[p02] / sym_state_probs_1171[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1171[1, 2] = sym.factor(
sym_state_probs_1171[p12] / sym_state_probs_1171[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1171[0, 3] = sym.factor(
sym_state_probs_1171[p03] / sym_state_probs_1171[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1171[1, 3] = sym.factor(
sym_state_probs_1171[p13] / sym_state_probs_1171[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1171[0, 4] = sym.factor(
sym_state_probs_1171[p04] / sym_state_probs_1171[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1171[1, 4] = sym.factor(
sym_state_probs_1171[p14] / sym_state_probs_1171[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1171[0, 5] = sym.factor(
sym_state_probs_1171[p05] / sym_state_probs_1171[p00]
) # (0,0) -> (0,5)
sym_state_recursive_ratios_P0_1171[1, 5] = sym.factor(
sym_state_probs_1171[p15] / sym_state_probs_1171[p00]
) # (0,0) -> (1,5)
sym_state_recursive_ratios_P0_1171[0, 6] = sym.factor(
sym_state_probs_1171[p06] / sym_state_probs_1171[p00]
) # (0,0) -> (0,6)
sym_state_recursive_ratios_P0_1171[1, 6] = sym.factor(
sym_state_probs_1171[p16] / sym_state_probs_1171[p00]
) # (0,0) -> (1,6)
sym_state_recursive_ratios_P0_1171[0, 7] = sym.factor(
sym_state_probs_1171[p07] / sym_state_probs_1171[p00]
) # (0,0) -> (0,7)
sym_state_recursive_ratios_P0_1171[1, 7] = sym.factor(
sym_state_probs_1171[p17] / sym_state_probs_1171[p00]
) # (0,0) -> (1,7)
return (
sym_state_probs_1171,
sym_state_recursive_ratios_1171,
sym_state_recursive_ratios_right_1171,
sym_state_recursive_ratios_P0_1171,
)
def get_symbolic_state_probabilities_1181():
num_of_servers = 1
threshold = 1
system_capacity = 8
buffer_capacity = 1
Q_sym_1181 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
(
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
) = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16, p07, p17, p08, p18"
)
pi_1181 = sym.Matrix(
[
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
]
)
dimension_1181 = Q_sym_1181.shape[0]
M_sym_1181 = sym.Matrix(
[Q_sym_1181.transpose()[:-1, :], sym.ones(1, dimension_1181)]
)
sym_diff_equations_1181 = M_sym_1181 @ pi_1181
b_sym_1181 = sym.Matrix([sym.zeros(dimension_1181 - 1, 1), [1]])
eq0_1181 = sym.Eq(sym_diff_equations_1181[0], b_sym_1181[0])
eq1_1181 = sym.Eq(sym_diff_equations_1181[1], b_sym_1181[1])
eq2_1181 = sym.Eq(sym_diff_equations_1181[2], b_sym_1181[2])
eq3_1181 = sym.Eq(sym_diff_equations_1181[3], b_sym_1181[3])
eq4_1181 = sym.Eq(sym_diff_equations_1181[4], b_sym_1181[4])
eq5_1181 = sym.Eq(sym_diff_equations_1181[5], b_sym_1181[5])
eq6_1181 = sym.Eq(sym_diff_equations_1181[6], b_sym_1181[6])
eq7_1181 = sym.Eq(sym_diff_equations_1181[7], b_sym_1181[7])
eq8_1181 = sym.Eq(sym_diff_equations_1181[8], b_sym_1181[8])
eq9_1181 = sym.Eq(sym_diff_equations_1181[9], b_sym_1181[9])
eq10_1181 = sym.Eq(sym_diff_equations_1181[10], b_sym_1181[10])
eq11_1181 = sym.Eq(sym_diff_equations_1181[11], b_sym_1181[11])
eq12_1181 = sym.Eq(sym_diff_equations_1181[12], b_sym_1181[12])
eq13_1181 = sym.Eq(sym_diff_equations_1181[13], b_sym_1181[13])
eq14_1181 = sym.Eq(sym_diff_equations_1181[14], b_sym_1181[14])
eq15_1181 = sym.Eq(sym_diff_equations_1181[15], b_sym_1181[15])
eq16_1181 = sym.Eq(sym_diff_equations_1181[16], b_sym_1181[16])
sym_state_probs_1181 = sym.solve(
[
eq0_1181,
eq1_1181,
eq2_1181,
eq3_1181,
eq4_1181,
eq5_1181,
eq6_1181,
eq7_1181,
eq8_1181,
eq9_1181,
eq10_1181,
eq11_1181,
eq12_1181,
eq13_1181,
eq14_1181,
eq15_1181,
eq16_1181,
],
(
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
),
)
sym_state_recursive_ratios_1181 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1181[0, 0] = 1
sym_state_recursive_ratios_1181[0, 1] = sym.factor(
sym_state_probs_1181[p01] / sym_state_probs_1181[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1181[1, 1] = sym.factor(
sym_state_probs_1181[p11] / sym_state_probs_1181[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1181[0, 2] = sym.factor(
sym_state_probs_1181[p02] / sym_state_probs_1181[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1181[1, 2] = sym.factor(
sym_state_probs_1181[p12] / sym_state_probs_1181[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1181[0, 3] = sym.factor(
sym_state_probs_1181[p03] / sym_state_probs_1181[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1181[1, 3] = sym.factor(
sym_state_probs_1181[p13] / sym_state_probs_1181[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1181[0, 4] = sym.factor(
sym_state_probs_1181[p04] / sym_state_probs_1181[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1181[1, 4] = sym.factor(
sym_state_probs_1181[p14] / sym_state_probs_1181[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1181[0, 5] = sym.factor(
sym_state_probs_1181[p05] / sym_state_probs_1181[p04]
) # (0,4) -> (0,5)
sym_state_recursive_ratios_1181[1, 5] = sym.factor(
sym_state_probs_1181[p15] / sym_state_probs_1181[p05]
) # (0,5) -> (1,5)
sym_state_recursive_ratios_1181[0, 6] = sym.factor(
sym_state_probs_1181[p06] / sym_state_probs_1181[p05]
) # (0,5) -> (0,6)
sym_state_recursive_ratios_1181[1, 6] = sym.factor(
sym_state_probs_1181[p16] / sym_state_probs_1181[p06]
) # (0,6) -> (1,6)
sym_state_recursive_ratios_1181[0, 7] = sym.factor(
sym_state_probs_1181[p07] / sym_state_probs_1181[p06]
) # (0,6) -> (0,7)
sym_state_recursive_ratios_1181[1, 7] = sym.factor(
sym_state_probs_1181[p17] / sym_state_probs_1181[p07]
) # (0,7) -> (1,7)
sym_state_recursive_ratios_1181[0, 8] = sym.factor(
sym_state_probs_1181[p08] / sym_state_probs_1181[p07]
) # (0,7) -> (0,8)
sym_state_recursive_ratios_1181[1, 8] = sym.factor(
sym_state_probs_1181[p18] / sym_state_probs_1181[p08]
) # (0,8) -> (1,8)
sym_state_recursive_ratios_right_1181 = sym_state_recursive_ratios_1181.copy()
sym_state_recursive_ratios_right_1181[1, 2] = sym.factor(
sym_state_probs_1181[p12] / sym_state_probs_1181[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1181[1, 3] = sym.factor(
sym_state_probs_1181[p13] / sym_state_probs_1181[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1181[1, 4] = sym.factor(
sym_state_probs_1181[p14] / sym_state_probs_1181[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1181[1, 5] = sym.factor(
sym_state_probs_1181[p15] / sym_state_probs_1181[p14]
) # (1,4) -> (1,5)
sym_state_recursive_ratios_right_1181[1, 6] = sym.factor(
sym_state_probs_1181[p16] / sym_state_probs_1181[p15]
) # (1,5) -> (1,6)
sym_state_recursive_ratios_right_1181[1, 7] = sym.factor(
sym_state_probs_1181[p17] / sym_state_probs_1181[p16]
) # (1,6) -> (1,7)
sym_state_recursive_ratios_right_1181[1, 8] = sym.factor(
sym_state_probs_1181[p18] / sym_state_probs_1181[p17]
) # (1,7) -> (1,8)
sym_state_recursive_ratios_P0_1181 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1181[0, 0] = 1
sym_state_recursive_ratios_P0_1181[0, 1] = sym.factor(
sym_state_probs_1181[p01] / sym_state_probs_1181[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1181[1, 1] = sym.factor(
sym_state_probs_1181[p11] / sym_state_probs_1181[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1181[0, 2] = sym.factor(
sym_state_probs_1181[p02] / sym_state_probs_1181[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1181[1, 2] = sym.factor(
sym_state_probs_1181[p12] / sym_state_probs_1181[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1181[0, 3] = sym.factor(
sym_state_probs_1181[p03] / sym_state_probs_1181[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1181[1, 3] = sym.factor(
sym_state_probs_1181[p13] / sym_state_probs_1181[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1181[0, 4] = sym.factor(
sym_state_probs_1181[p04] / sym_state_probs_1181[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1181[1, 4] = sym.factor(
sym_state_probs_1181[p14] / sym_state_probs_1181[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1181[0, 5] = sym.factor(
sym_state_probs_1181[p05] / sym_state_probs_1181[p00]
) # (0,0) -> (0,5)
sym_state_recursive_ratios_P0_1181[1, 5] = sym.factor(
sym_state_probs_1181[p15] / sym_state_probs_1181[p00]
) # (0,0) -> (1,5)
sym_state_recursive_ratios_P0_1181[0, 6] = sym.factor(
sym_state_probs_1181[p06] / sym_state_probs_1181[p00]
) # (0,0) -> (0,6)
sym_state_recursive_ratios_P0_1181[1, 6] = sym.factor(
sym_state_probs_1181[p16] / sym_state_probs_1181[p00]
) # (0,0) -> (1,6)
sym_state_recursive_ratios_P0_1181[0, 7] = sym.factor(
sym_state_probs_1181[p07] / sym_state_probs_1181[p00]
) # (0,0) -> (0,7)
sym_state_recursive_ratios_P0_1181[1, 7] = sym.factor(
sym_state_probs_1181[p17] / sym_state_probs_1181[p00]
) # (0,0) -> (1,7)
sym_state_recursive_ratios_P0_1181[0, 8] = sym.factor(
sym_state_probs_1181[p08] / sym_state_probs_1181[p00]
) # (0,0) -> (0,8)
sym_state_recursive_ratios_P0_1181[1, 8] = sym.factor(
sym_state_probs_1181[p18] / sym_state_probs_1181[p00]
) # (0,0) -> (1,8)
return (
sym_state_probs_1181,
sym_state_recursive_ratios_1181,
sym_state_recursive_ratios_right_1181,
sym_state_recursive_ratios_P0_1181,
)
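# Hedged sketch of the pattern behind the *_right matrices above: the copy
# keeps the column-0/threshold entries, then every later entry in a buffer
# row (row >= 1) is re-taken against its left neighbour rather than the
# state above it. `state_symbols` (a dict mapping (row, col) to symbols) is
# a hypothetical helper, as in the earlier sketch; threshold=1 matches the
# models in this file.
def _sketch_ratios_right(ratios, sym_state_probs, state_symbols, threshold=1):
    """Overwrite row >= 1 entries of a copied ratio matrix with
    left-neighbour ratios."""
    right = ratios.copy()
    rows, cols = right.shape
    for row in range(1, rows):
        for col in range(threshold + 1, cols):
            if (row, col) in state_symbols and (row, col - 1) in state_symbols:
                right[row, col] = sym.factor(
                    sym_state_probs[state_symbols[(row, col)]]
                    / sym_state_probs[state_symbols[(row, col - 1)]]
                )
    return right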
def get_symbolic_state_probabilities_1191():
num_of_servers = 1
threshold = 1
system_capacity = 9
buffer_capacity = 1
Q_sym_1191 = abg.markov.get_symbolic_transition_matrix(
num_of_servers, threshold, system_capacity, buffer_capacity
)
(
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
p09,
p19,
) = sym.symbols(
"p00, p01, p11, p02, p12, p03, p13, p04, p14, p05, p15, p06, p16, p07, p17, p08, p18, p09, p19"
)
pi_1191 = sym.Matrix(
[
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
p09,
p19,
]
)
dimension_1191 = Q_sym_1191.shape[0]
M_sym_1191 = sym.Matrix(
[Q_sym_1191.transpose()[:-1, :], sym.ones(1, dimension_1191)]
)
sym_diff_equations_1191 = M_sym_1191 @ pi_1191
b_sym_1191 = sym.Matrix([sym.zeros(dimension_1191 - 1, 1), [1]])
eq0_1191 = sym.Eq(sym_diff_equations_1191[0], b_sym_1191[0])
eq1_1191 = sym.Eq(sym_diff_equations_1191[1], b_sym_1191[1])
eq2_1191 = sym.Eq(sym_diff_equations_1191[2], b_sym_1191[2])
eq3_1191 = sym.Eq(sym_diff_equations_1191[3], b_sym_1191[3])
eq4_1191 = sym.Eq(sym_diff_equations_1191[4], b_sym_1191[4])
eq5_1191 = sym.Eq(sym_diff_equations_1191[5], b_sym_1191[5])
eq6_1191 = sym.Eq(sym_diff_equations_1191[6], b_sym_1191[6])
eq7_1191 = sym.Eq(sym_diff_equations_1191[7], b_sym_1191[7])
eq8_1191 = sym.Eq(sym_diff_equations_1191[8], b_sym_1191[8])
eq9_1191 = sym.Eq(sym_diff_equations_1191[9], b_sym_1191[9])
eq10_1191 = sym.Eq(sym_diff_equations_1191[10], b_sym_1191[10])
eq11_1191 = sym.Eq(sym_diff_equations_1191[11], b_sym_1191[11])
eq12_1191 = sym.Eq(sym_diff_equations_1191[12], b_sym_1191[12])
eq13_1191 = sym.Eq(sym_diff_equations_1191[13], b_sym_1191[13])
eq14_1191 = sym.Eq(sym_diff_equations_1191[14], b_sym_1191[14])
eq15_1191 = sym.Eq(sym_diff_equations_1191[15], b_sym_1191[15])
eq16_1191 = sym.Eq(sym_diff_equations_1191[16], b_sym_1191[16])
eq17_1191 = sym.Eq(sym_diff_equations_1191[17], b_sym_1191[17])
eq18_1191 = sym.Eq(sym_diff_equations_1191[18], b_sym_1191[18])
sym_state_probs_1191 = sym.solve(
[
eq0_1191,
eq1_1191,
eq2_1191,
eq3_1191,
eq4_1191,
eq5_1191,
eq6_1191,
eq7_1191,
eq8_1191,
eq9_1191,
eq10_1191,
eq11_1191,
eq12_1191,
eq13_1191,
eq14_1191,
eq15_1191,
eq16_1191,
eq17_1191,
eq18_1191,
],
(
p00,
p01,
p11,
p02,
p12,
p03,
p13,
p04,
p14,
p05,
p15,
p06,
p16,
p07,
p17,
p08,
p18,
p09,
p19,
),
)
sym_state_recursive_ratios_1191 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_1191[0, 0] = 1
sym_state_recursive_ratios_1191[0, 1] = sym.factor(
sym_state_probs_1191[p01] / sym_state_probs_1191[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_1191[1, 1] = sym.factor(
sym_state_probs_1191[p11] / sym_state_probs_1191[p01]
) # (0,1) -> (1,1)
sym_state_recursive_ratios_1191[0, 2] = sym.factor(
sym_state_probs_1191[p02] / sym_state_probs_1191[p01]
) # (0,1) -> (0,2)
sym_state_recursive_ratios_1191[1, 2] = sym.factor(
sym_state_probs_1191[p12] / sym_state_probs_1191[p02]
) # (0,2) -> (1,2)
sym_state_recursive_ratios_1191[0, 3] = sym.factor(
sym_state_probs_1191[p03] / sym_state_probs_1191[p02]
) # (0,2) -> (0,3)
sym_state_recursive_ratios_1191[1, 3] = sym.factor(
sym_state_probs_1191[p13] / sym_state_probs_1191[p03]
) # (0,3) -> (1,3)
sym_state_recursive_ratios_1191[0, 4] = sym.factor(
sym_state_probs_1191[p04] / sym_state_probs_1191[p03]
) # (0,3) -> (0,4)
sym_state_recursive_ratios_1191[1, 4] = sym.factor(
sym_state_probs_1191[p14] / sym_state_probs_1191[p04]
) # (0,4) -> (1,4)
sym_state_recursive_ratios_1191[0, 5] = sym.factor(
sym_state_probs_1191[p05] / sym_state_probs_1191[p04]
) # (0,4) -> (0,5)
sym_state_recursive_ratios_1191[1, 5] = sym.factor(
sym_state_probs_1191[p15] / sym_state_probs_1191[p05]
) # (0,5) -> (1,5)
sym_state_recursive_ratios_1191[0, 6] = sym.factor(
sym_state_probs_1191[p06] / sym_state_probs_1191[p05]
) # (0,5) -> (0,6)
sym_state_recursive_ratios_1191[1, 6] = sym.factor(
sym_state_probs_1191[p16] / sym_state_probs_1191[p06]
) # (0,6) -> (1,6)
sym_state_recursive_ratios_1191[0, 7] = sym.factor(
sym_state_probs_1191[p07] / sym_state_probs_1191[p06]
) # (0,6) -> (0,7)
sym_state_recursive_ratios_1191[1, 7] = sym.factor(
sym_state_probs_1191[p17] / sym_state_probs_1191[p07]
) # (0,7) -> (1,7)
sym_state_recursive_ratios_1191[0, 8] = sym.factor(
sym_state_probs_1191[p08] / sym_state_probs_1191[p07]
) # (0,7) -> (0,8)
sym_state_recursive_ratios_1191[1, 8] = sym.factor(
sym_state_probs_1191[p18] / sym_state_probs_1191[p08]
) # (0,8) -> (1,8)
sym_state_recursive_ratios_1191[0, 9] = sym.factor(
sym_state_probs_1191[p09] / sym_state_probs_1191[p08]
) # (0,8) -> (0,9)
sym_state_recursive_ratios_1191[1, 9] = sym.factor(
sym_state_probs_1191[p19] / sym_state_probs_1191[p09]
) # (0,9) -> (1,9)
sym_state_recursive_ratios_right_1191 = sym_state_recursive_ratios_1191.copy()
sym_state_recursive_ratios_right_1191[1, 2] = sym.factor(
sym_state_probs_1191[p12] / sym_state_probs_1191[p11]
) # (1,1) -> (1,2)
sym_state_recursive_ratios_right_1191[1, 3] = sym.factor(
sym_state_probs_1191[p13] / sym_state_probs_1191[p12]
) # (1,2) -> (1,3)
sym_state_recursive_ratios_right_1191[1, 4] = sym.factor(
sym_state_probs_1191[p14] / sym_state_probs_1191[p13]
) # (1,3) -> (1,4)
sym_state_recursive_ratios_right_1191[1, 5] = sym.factor(
sym_state_probs_1191[p15] / sym_state_probs_1191[p14]
) # (1,4) -> (1,5)
sym_state_recursive_ratios_right_1191[1, 6] = sym.factor(
sym_state_probs_1191[p16] / sym_state_probs_1191[p15]
) # (1,5) -> (1,6)
sym_state_recursive_ratios_right_1191[1, 7] = sym.factor(
sym_state_probs_1191[p17] / sym_state_probs_1191[p16]
) # (1,6) -> (1,7)
sym_state_recursive_ratios_right_1191[1, 8] = sym.factor(
sym_state_probs_1191[p18] / sym_state_probs_1191[p17]
) # (1,7) -> (1,8)
sym_state_recursive_ratios_right_1191[1, 9] = sym.factor(
sym_state_probs_1191[p19] / sym_state_probs_1191[p18]
) # (1,8) -> (1,9)
sym_state_recursive_ratios_P0_1191 = sym.zeros(
buffer_capacity + 1, system_capacity + 1
)
sym_state_recursive_ratios_P0_1191[0, 0] = 1
sym_state_recursive_ratios_P0_1191[0, 1] = sym.factor(
sym_state_probs_1191[p01] / sym_state_probs_1191[p00]
) # (0,0) -> (0,1)
sym_state_recursive_ratios_P0_1191[1, 1] = sym.factor(
sym_state_probs_1191[p11] / sym_state_probs_1191[p00]
) # (0,0) -> (1,1)
sym_state_recursive_ratios_P0_1191[0, 2] = sym.factor(
sym_state_probs_1191[p02] / sym_state_probs_1191[p00]
) # (0,0) -> (0,2)
sym_state_recursive_ratios_P0_1191[1, 2] = sym.factor(
sym_state_probs_1191[p12] / sym_state_probs_1191[p00]
) # (0,0) -> (1,2)
sym_state_recursive_ratios_P0_1191[0, 3] = sym.factor(
sym_state_probs_1191[p03] / sym_state_probs_1191[p00]
) # (0,0) -> (0,3)
sym_state_recursive_ratios_P0_1191[1, 3] = sym.factor(
sym_state_probs_1191[p13] / sym_state_probs_1191[p00]
) # (0,0) -> (1,3)
sym_state_recursive_ratios_P0_1191[0, 4] = sym.factor(
sym_state_probs_1191[p04] / sym_state_probs_1191[p00]
) # (0,0) -> (0,4)
sym_state_recursive_ratios_P0_1191[1, 4] = sym.factor(
sym_state_probs_1191[p14] / sym_state_probs_1191[p00]
) # (0,0) -> (1,4)
sym_state_recursive_ratios_P0_1191[0, 5] = sym.factor(
sym_state_probs_1191[p05] / sym_state_probs_1191[p00]
) # (0,0) -> (0,5)
sym_state_recursive_ratios_P0_1191[1, 5] = sym.factor(
sym_state_probs_1191[p15] / sym_state_probs_1191[p00]
) # (0,0) -> (1,5)
sym_state_recursive_ratios_P0_1191[0, 6] = sym.factor(
sym_state_probs_1191[p06] / sym_state_probs_1191[p00]
) # (0,0) -> (0,6)
sym_state_recursive_ratios_P0_1191[1, 6] = sym.factor(
sym_state_probs_1191[p16] / sym_state_probs_1191[p00]
) # (0,0) -> (1,6)
sym_state_recursive_ratios_P0_1191[0, 7] = sym.factor(
sym_state_probs_1191[p07] / sym_state_probs_1191[p00]
) # (0,0) -> (0,7)
sym_state_recursive_ratios_P0_1191[1, 7] = sym.factor(
sym_state_probs_1191[p17] / sym_state_probs_1191[p00]
) # (0,0) -> (1,7)
sym_state_recursive_ratios_P0_1191[0, 8] = sym.factor(
sym_state_probs_1191[p08] / sym_state_probs_1191[p00]
) # (0,0) -> (0,8)
sym_state_recursive_ratios_P0_1191[1, 8] = sym.factor(
sym_state_probs_1191[p18] / sym_state_probs_1191[p00]
) # (0,0) -> (1,8)
sym_state_recursive_ratios_P0_1191[0, 9] = sym.factor(
sym_state_probs_1191[p09] / sym_state_probs_1191[p00]
) # (0,0) -> (0,9)
sym_state_recursive_ratios_P0_1191[1, 9] = sym.factor(
sym_state_probs_1191[p19] / sym_state_probs_1191[p00]
) # (0,0) -> (1,9)
return (
sym_state_probs_1191,
sym_state_recursive_ratios_1191,
sym_state_recursive_ratios_right_1191,
sym_state_recursive_ratios_P0_1191,
)
#!/software/python-2.7-2014q3-el6-x86_64/bin/python
import SNANA_Reader as simread
import REAL_Reader as dataread
#import astropy.cosmology as cosmo
import traceback
import scipy
import scipy.stats as stats
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
#import Cosmology
import scipy.stats.mstats as mstats
from scipy.interpolate import UnivariateSpline
from sys import argv
import glob
import time
import os
import gzip
import shutil
import numpy.ma as ma
import subprocess
import iminuit as iM
from iminuit import Minuit as M
from discreteChi2Func import discreteChi2Func as chi2func
import pandas as pd
class Rate_Fitter:
def __init__(self, realfilename, realName, simfilename, simName, simgenfilename, MCBeta, MCK, zminSamp=0.1, zmaxSamp=1.20 , zminFit = 0.1, zmaxFit = 1.20, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, Rate_Model = 'powerlaw', cheatType = False, cheatZ = False, cheatCCSub = False, cheatCCScale = False, cuts = None, nprint = 5, MURESCuts = None, noCCMC = False, priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, fixCCScale = False):
print "Rate_Fitter"
print "np version {0}".format(np.__version__)
self.zminSamp = zminSamp
self.zmaxSamp = zmaxSamp
self.zminFit = zminFit
self.zmaxFit = zmaxFit
self.MCBeta = MCBeta
self.MCK = MCK
self.Rate_Model = Rate_Model
self.cheatType = cheatType
self.cheatZ = cheatZ
self.cheatCCSub = cheatCCSub
self.cheatCCScale = cheatCCScale
self.cuts = cuts
self.nprint = nprint
self.MURESCuts = MURESCuts
self.priorRate = priorRate
self.priorZEff = priorZEff
self.ratePriorErrUp = ratePriorErrUp
self.ratePriorErrDown = ratePriorErrDown
self.ratePriorErrAll = ratePriorErrAll
self.fixCCScale = fixCCScale
#print "PRIORS"
#print priorRate
#print priorZEff
#print ratePriorErrUp
#print ratePriorErrDown
if self.cheatZ:
self.ztype = 'SIM_ZCMB'
else:
#self.ztype = 'zHD'
self.ztype = 'zPHOT'
self.shiftFlagData = False
self.shiftFlagSim = False
self.globalChi2Storage = []
self.globalNDataStorage = []
'''
self.globalZPhotBinStorage = []
self.globalNDataIaPhotBinStorage = []
self.globalNDataCCPhotBinStorage = []
self.globalZTrueBinStorage = []
self.globalNDataIaTrueBinStorage = []
self.globalNDataCCTrueBinStorage = []
'''
print 'a'
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
try:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 5)
except:
self.simcat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
print 'b'
self.simName = simName
self.simgencat = simread.SNANA_Cat(simfilename, simName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
print 'c'
try:
#with np.load(simgenfilename+'.npz', allow_pickle = True) as data0:
# SIMGEN = data0['a']
SIMGEN = np.load(simgenfilename + '.npy', allow_pickle = True)
except:
SIMGEN = np.genfromtxt(simgenfilename, dtype=None, names = True, skip_footer=3, invalid_raise=False)
print "Compress save A"
SIMGEN.dtype.names = map(str, SIMGEN.dtype.names)
#np.savez_compressed(simgenfilename+'.npz', a = SIMGEN)
np.save(simgenfilename+'.npy', SIMGEN)
print "WHY DO YOU HATE ME WHEN I SHOW YOU NOTHING BUT LOVE"
print simgenfilename
#SIMGEN = pd.read_csv(simgenfilename, delim_whitespace=True, comment="#").to_records(index = False)
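# Descriptive note: the except branch above is a one-time cache build -- it
# parses the text genfile with np.genfromtxt and saves a .npy copy so later
# runs hit the fast np.load path in the try branch.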
print 'd'
SIMGEN = SIMGEN[SIMGEN['GENZ'] != 'GENZ']
self.simgencat.params = {'flat':True, 'H0': simH0, 'Om0':simOmegaM, 'Ob0': simOb0, 'sigma8': simSigma8, 'ns': simNs}
#self.simgencat.cosmo = Cosmology.setCosmology('simCosmo', self.simcat.params)
self.simgencat.OrigCatalog = np.copy(SIMGEN)
self.simgencat.Catalog = np.copy(SIMGEN)
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENZ'] != 'GENZ']
self.simgencat.simname = simName
self.simgencat.NSN = self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGEN NUMBER"
print self.simgencat.NSN
print "TEST2"
print self.simgencat.Catalog['GENZ'].shape[0]
print self.simgencat.Catalog['GENZ'].shape[1]
print self.simgencat.Catalog['GENZ'].shape[2]
print "SIMGENCAT FILE"
print simfilename
self.realName = realName
try:
print 'q'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6)
except:
#self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
try:
print 'r'
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
print 's'
self.realcat = dataread.REAL_Cat(realfilename, realName, skip_header =11 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
print "Pre cut Catalog"
print self.realcat.Catalog.shape
for cut in cuts:
print 'a'
print cut
print self.realcat.Catalog.shape
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.simcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
print 'b'
print cut
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
self.postCutSimCat = np.copy(self.simcat.Catalog)
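# Hedged note on the expected `cuts` format, inferred from the loop above:
# each entry looks like (column_name, lower_bound, upper_bound), e.g.
# cuts = [('FITPROB', 0.01, 1.1)] -- the example values here are
# illustrative, not taken from any run of this code.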
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
self.simcat.Catalog = self.simcat.Catalog[(self.simcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.simcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print 'zCut Pre MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
'''
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
simCond = (self.simcat.Catalog[self.ztype] < mc[0]) | (self.simcat.Catalog[self.ztype] > mc[1])| ((self.simcat.Catalog['MURES'] > mc[2])& (self.simcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
self.simcat.Catalog = self.simcat.Catalog[simCond]
'''
self.realcat.Catalog = self.realcat.Catalog[ np.abs( self.realcat.Catalog['MURES'] * 1.0 / self.realcat.Catalog['MUERR'] ) < MURESCuts]
self.simcat.Catalog = self.simcat.Catalog[ np.abs( self.simcat.Catalog['MURES'] * 1.0 / self.simcat.Catalog['MUERR'] ) < MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
print 'zCut Post MURESCut'
print np.sum((self.realcat.Catalog[self.ztype].astype(float) > self.zminFit) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxFit))
print "Post cut Catalog"
print self.realcat.Catalog.shape
if noCCMC:
self.simgencat.Catalog = self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'] == 1]
self.simcat.Catalog = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]
def newData(self, realfilename, realName, simInd =100):
self.realName = realName
self.shiftFlagData = False
try:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95)
except:
self.realcat = simread.SNANA_Cat(realfilename, realName, simOmegaM=0.3, simOmegaL=0.7, simH0=70.0, simw=-1.0, simOb0=0.049, simSigma8=0.81, simNs=0.95, skip_header = 6 )
if self.cheatType:
print "WARNING, THE FITTER IS CHEATING AND ELIMINATED NON-IAs USING SIM INFO"
self.realcat.Catalog = self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]
if simInd < self.nprint:
print 'N precuts'
print self.realcat.Catalog['FITPROB'].shape
print "Pre cut Catalog"
print self.realcat.Catalog.shape
for cut in self.cuts:
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[cut[0]].astype(type(cut[1])) > cut[1]) & (self.realcat.Catalog[cut[0]].astype(type(cut[2])) < cut[2])]
self.realcat.Catalog = self.realcat.Catalog[(self.realcat.Catalog[self.ztype].astype(float) > self.zminSamp) & (self.realcat.Catalog[self.ztype].astype(float) < self.zmaxSamp)]
print "Post cut Catalog"
print self.realcat.Catalog.shape
self.postCutRealCat = np.copy(self.realcat.Catalog)
print 'MURESCUT'
print self.MURESCuts
print self.realcat.Catalog.shape
if not (self.MURESCuts is None):
#MURES Cut format: (zmin, zmax, neg Cut, pos Cut)
'''
for mc in self.MURESCuts:
realCond = (self.realcat.Catalog[self.ztype] < mc[0]) | (self.realcat.Catalog[self.ztype] > mc[1])| ((self.realcat.Catalog['MURES'] > mc[2])& (self.realcat.Catalog['MURES'] < mc[3]))
self.realcat.Catalog = self.realcat.Catalog[realCond]
'''
self.realcat.Catalog = self.realcat.Catalog[np.abs(self.realcat.Catalog['MURES']*1.0/self.realcat.Catalog['MUERR']) < self.MURESCuts]
print "PostMURESCut Shape"
print self.realcat.Catalog.shape
if simInd < self.nprint:
print "Minimum Fitprob"
print np.min(self.realcat.Catalog['FITPROB'])
print 'N postcuts'
print self.realcat.Catalog['FITPROB'].shape
def zSystematic(self, binList = None, nbins = None):
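# Descriptive note: the assert(0) below disables this method before any of
# the z-bias shift logic runs.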
assert(0)
if nbins is None:
try:
self.nbins = len(binList) - 1
self.binList = binList
except:
self.nbins = binList.shape[0] - 1
self.binList = binList
else:
binList = np.linspace(self.zmin, self.zmax, nbins+1)
self.nbins = nbins
self.binList = binList
if self.shiftFlagData:
print "DONT DOUBLE SHIFT"
return 0
if not self.shiftFlagSim:
oldsimz = self.simcat.Catalog['zPHOT']
oldsimtruez = self.simcat.Catalog['SIM_ZCMB']
stat, bins, binnum = stats.binned_statistic(oldsimz, oldsimz - oldsimtruez, bins = self.binList, statistic = 'mean')
self.zBiasShifts = stat
newsimz = oldsimz - stat[binnum]
assert(np.sum(np.abs(newsimz - oldsimz)) > 0)
assert(newsimz.shape[0] == oldsimz.shape[0])
self.shiftFlagSim = True
oldz = self.realcat.Catalog['zPHOT']
_,_, binnum = stats.binned_statistic(oldz, oldz , bins = self.binList, statistic = 'mean')
newz = oldz - self.zBiasShifts[binnum]
oldzshape = oldz.shape[0]
self.realcat.Catalog['zPHOT'].put(np.arange(0, oldz.shape[0]), newz)
assert(np.sum(np.abs(newz - oldz)) > 0)
assert((oldzshape - np.arange(0, oldz.shape[0]).shape[0])< 1)
self.simFlagData = True
def effCalc(self, fracContamCut = 0.0, nbinsSamp = None, nbinsFit = None, binListSamp = None, binListFit = None, simInd =100):
#### Do we want SNIas or all SN for efficiency?
import matplotlib as mpl
if nbinsSamp is None:
try:
self.nbinsSamp = len(binListSamp) - 1
self.binListSamp = binListSamp
except:
self.nbinsSamp = binListSamp.shape[0] - 1
self.binListSamp = binListSamp
else:
binListSamp = np.linspace(self.zminSamp, self.zmaxSamp, nbinsSamp+1)
self.nbinsSamp = nbinsSamp
self.binListSamp = binListSamp
if nbinsFit is None:
try:
self.nbinsFit = len(binListFit) - 1
self.binListFit = binListFit
except:
self.nbinsFit = binListFit.shape[0] - 1
self.binListFit = binListFit
else:
binListFit = np.linspace(self.zminFit, self.zmaxFit, nbinsFit+1)
self.nbinsFit = nbinsFit
self.binListFit = binListFit
self.typeString = ''
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
'''
if (fracContamCut > 0.000000001) & (fracContamCut < 1.0):
print " Cutting based on Frac Contam"
histTot, binsX, binsY = np.histogram2d(self.simcat.Catalog[ztype], self.simcat.Catalog['MURES'], bins = nbins)
histCC, binsX, binsY = np.histogram2d(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) != 1][ztype], self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) != 1]['MURES'], bins = (binsX, binsY))
fracContam = histCC.astype(np.float)/histTot.astype(np.float)
for fcRow, i in zip(fracContam, xrange(binsX.shape[0])):
for fc, j in zip(fcRow, xrange(binsY.shape[0])):
if fc < fracContamCut:
continue
else:
simInBin = (self.simcat.Catalog[ztype] > binsX[i]) & (self.simcat.Catalog[ztype] < binsX[i+1]) & (self.simcat.Catalog['MURES'] > binsY[j]) & (self.simcat.Catalog['MURES'] < binsY[j+1])
realInBin = (self.realcat.Catalog[ztype] > binsX[i]) & (self.realcat.Catalog[ztype] < binsX[i+1]) & (self.realcat.Catalog['MURES'] > binsY[j]) & (self.realcat.Catalog['MURES'] < binsY[j+1])
self.simcat.Catalog = self.simcat.Catalog[np.invert(simInBin)]
self.realcat.Catalog = self.realcat.Catalog[np.invert(realInBin)]
'''
zPHOTs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1][self.ztype].astype(float)
zTRUEs = self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) == 1]['SIM_ZCMB'].astype(float)
self.typeString = self.typeString + 'A1'
print "Type Location A"
print "Choice A1"
print zPHOTs.shape
print zTRUEs.shape
print self.binListFit
print self.binListSamp
counts, zPhotEdges, zTrueEdges, binnumber = scipy.stats.binned_statistic_2d(zPHOTs, zTRUEs, zTRUEs, statistic = 'count', bins = (self.binListFit, self.binListSamp))
assert(zPhotEdges.shape[0] == (self.nbinsFit + 1))
print "Type Location B"
print "Choice B1"
self.typeString = self.typeString + 'B1'
zGenHist, zGenBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins = self.binListSamp)
#zSim1Hist, zSim1Bins = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) ==1]['SIM_ZCMB'].astype(float), bins = self.binListSamp)
print "counts of zTrue in each zPhot vs zTrue bin"
print counts.astype(int)
print "zGen Bins"
print zGenBins
print 'zGen Histogram'
print zGenHist
print "sum zGen events"
print np.sum(zGenHist)
print "sum zPhot events"
print np.sum(counts)
#print "DEBUG HERE"
#assert(0)
self.effmat = np.zeros((self.nbinsFit, self.nbinsSamp))
xMax = zPhotEdges.shape[0] - 2
yMax = zTrueEdges.shape[0] - 2
print zGenHist
print counts.astype(int)
'''
for zPhotLedge, zPhotRedge, row, i in zip(zPhotEdges[:-1], zPhotEdges[1:], counts, xrange(xMax + 1)):
zPhotCenter = (zPhotLedge + zPhotRedge)/2.0
for zTrueLedge, zTrueRedge, count, j in zip(zTrueEdges[:-1], zTrueEdges[1:], row, xrange(yMax + 1)):
zTrueCenter = (zTrueLedge + zTrueRedge)/2.0
inCell = (zPHOTs > zPhotLedge) & (zPHOTs < zPhotRedge) & (zTRUEs > zTrueLedge)& (zTRUEs < zTrueRedge)
zPhotCell = zPHOTs[inCell];zTrueCell = zTRUEs[inCell]
self.effmat[i][j] = count # np.sum(inCell)
#print "inCell"
#print np.sum(inCell)
#print "count"
#print count
#try:
# assert(np.abs(np.sum(inCell) - count) < 2)
#except:
# print "CHECK ABOVE"
for row, i in zip(self.effmat, xrange(self.effmat.shape[0])):
for j in xrange(row.shape[0]):
self.effmat[i][j] /= zGenHist[j]
'''
self.effmat = counts/zGenHist
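# effmat[i][j] approximates P(pass cuts and land in zPhot fit bin i | generated in zTrue sample bin j):
# counts[i, j] counts passing SNe Ia with zPhot in bin i and zTrue in bin j, and zGenHist[j] counts
# all SNe generated in zTrue bin j. Toy example (2 fit bins x 2 sample bins):
# counts = [[80, 5], [10, 60]], zGenHist = [200, 100] -> effmat = [[0.40, 0.05], [0.05, 0.60]],
# i.e. 40% of SNe generated in sample bin 0 are recovered in fit bin 0, while 5% of those
# generated in sample bin 1 migrate down into fit bin 0, and so on.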
#if simInd < self.nprint:
print 'effmat'
print self.effmat
extent = [zPhotEdges[0], zPhotEdges[-1], zTrueEdges[0], zTrueEdges[-1]]
if (simInd == 0) or (not ('sim' in self.realName.lower())):
plt.figure()
plt.imshow(np.flipud(counts.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName + 'redshiftDistro.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues', norm=mpl.colors.LogNorm())
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrixLog.png')
plt.clf()
plt.close()
plt.figure()
plt.imshow(np.flipud(self.effmat.T), extent = extent, cmap = 'Blues')
plt.colorbar()
plt.savefig(self.realName + 'efficiencyMatrix.png')
plt.clf()
plt.close()
def fit_rate(self, fixK = False, fixBeta = False, simInd =100, trueBeta = 0, CCScale = 1.0, CCScaleErr = None, TrueCCScale = 1.0, BetaInit = 0.0, kInit = 1.0, BetaErr = 1, kErr = 1, f_Js = None, CCZbins = None, scaleZBins = None, Blind = False):
#import iminuit as iM
#from iminuit import Minuit as M
#import numpy as np
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#if self.cheatZ:
# self.ztype = 'SIM_ZCMB'
#else:
# self.ztype = 'zPHOT'
plt.switch_backend('Agg')
if simInd < self.nprint:
print "Type Location C"
print "Choice C1"
if len(self.typeString) <= 4:
self.typeString = self.typeString + 'C1'
nSim, simBins = np.histogram(self.simgencat.Catalog[self.simgencat.Catalog['GENTYPE'].astype(int) == 1]['GENZ'].astype(float), bins=self.binListSamp)
if simInd < self.nprint:
print "nSim1"
print nSim
print self.simgencat.Catalog.shape
print "FIGURE OUT WHY YOU MADE THIS ASSERT STATEMENT LATER"
#assert(0)
nSim2, simBins2 = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'].astype(int) ==1][self.ztype].astype(float), bins=self.binListFit)
nSim3, simBins3 = np.histogram(self.simcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
NCC , _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1][self.ztype].astype(float), bins=self.binListFit)
if self.fixCCScale:
print "Fix CC Scale at 1"
else:
if simInd < self.nprint:
print "nSim2"
print nSim2
print "nSim3"
print nSim3
print "nCC"
print NCC
OrigNCC = np.copy(NCC)
if self.cheatCCSub:
if self.cheatCCScale:
print "WARNING: Only cheating on CC Subtraction not scale"
print "Setting NCC to infinity to make sure that cheating correctly"
print "Diagnostics after this point may be nonsense"
print self.cheatCCSub
print "NCC BeforeFck"
print NCC
NCC = NCC*1E100
print "NCC AfterFck"
print NCC
elif self.cheatCCScale:
print "NCC Before1"
print NCC
print TrueCCScale
NCC = applyCCScale(NCC, TrueCCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After1"
print NCC
else:
print "NCC Before2"
print NCC
print CCScale
NCC = applyCCScale(NCC, CCScale, CCScaleErr, zbins = CCZbins, datazbins = self.binListFit)
print "NCC After2"
print NCC
#assert(0)
NIa , _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1][self.ztype].astype(float), bins=self.binListFit)
'''
DebugNIaPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1]['zPHOT'].astype(float), bins=self.binListFit)
DebugNCCPhot = applyCCScale(DebugNCCPhot, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListFit)
DebugNIaTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] == 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue, _ = np.histogram(self.simcat.Catalog[self.simcat.Catalog['SIM_TYPE_INDEX'] != 1]['SIM_ZCMB'].astype(float), bins=self.binListSamp)
DebugNCCTrue = applyCCScale(DebugNCCTrue, CCScale, CCScaleErr, zbins = scaleZBins, datazbins = self.binListSamp)
uselessCtr = 0
for niap, nccp, niat, ncct, zp, zt in zip(DebugNIaPhot, DebugNCCPhot, DebugNIaTrue, DebugNCCTrue,(self.binListFit[1:] + self.binListFit[:-1])/2.0, (self.binListSamp[1:] + self.binListSamp[:-1])/2.0 ):
uselessCtr +=1
self.globalZTrueBinStorage.append(zt)
self.globalZPhotBinStorage.append(zp)
self.globalNDataIaPhotBinStorage.append(niap)
self.globalNDataCCPhotBinStorage.append(nccp)
self.globalNDataIaTrueBinStorage.append(niat)
self.globalNDataCCTrueBinStorage.append(ncct)
print "UselessCtr"
print uselessCtr
'''
try:
TrueNCC, _ = np.histogram(self.realcat.Catalog[self.realcat.Catalog['SIM_TYPE_INDEX'] !=1][self.ztype].astype(float), bins=self.binListFit)
if simInd < self.nprint:
print "True NCC Data"
print TrueNCC
except:
print "Using real data"
TrueNCC = 0.0
nData, dataBins = np.histogram(self.realcat.Catalog[self.ztype].astype(float), bins=self.binListFit)
print "nData"
print nData
if not(self.cheatCCSub):
FracBad = NCC*1.0/(1.0*(NCC+NIa))
nCCData = nData*FracBad
else:
nCCData = TrueNCC*1.0
FracBad = TrueNCC*1.0/nData
if simInd < self.nprint:
print "PreScale NCC/nSim"
print OrigNCC*1.0/(OrigNCC+NIa)
print "PreScale Pred NCC Data"
print OrigNCC*1.0/(OrigNCC+NIa)*nData
print "PreScale Pred NCC Data if 2NCC"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData
print "TrueNCC"
print TrueNCC
if not np.isscalar(TrueNCC):
if simInd < self.nprint:
print "PreScale PredNCCData - TrueNCCData"
print OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC
print "PreScale PredNCCData - TrueNCCData/ PredNCCData"
print (OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData)
else:
print "Using real data"
print "Mean of PreScale PredNCCData - TrueNCCData/ PredNCCData"
print np.nanmean((OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData - TrueNCC)/(OrigNCC*2.0/(2.0*OrigNCC+NIa)*nData))
print "PostScale NCC/nData"
print NCC*1.0/(NCC+NIa)
if simInd < self.nprint:
print "Fraction of CCs in each bin"
print FracBad
print 'NCC'
print NCC
print 'nSim2'
print nSim2
print "nData, dataBins, realcat shape pre contam correction"
print nData
print dataBins
print np.sum(self.realcat.Catalog[self.ztype].astype(float) > self.zmaxFit)
print np.sum(self.realcat.Catalog[self.ztype].astype(float) < self.zminFit)
print self.realcat.Catalog[self.ztype].shape
print "Ratio nData/nSim"
print 1.0*nData/(1.0*nSim3)
print "Ratio nSim2/nData"
print 1.0*nSim3/(1.0*nData)
print "FracBad"
print FracBad
print 'NCCData'
print nCCData
if simInd < self.nprint:
print "overall Contam"
print np.sum(NCC)*1.0/(np.sum(nSim3)*1.0)
def chi2func(nData, nSim, effmat, fnorm, zCentersSamp, zCentersFit, k = 1.0, Beta = 0.0, zBreak = 1.0, dump = False, complexdump = False, modelError = False, nIA = None, nCC = None, Rate_Model = 'powerlaw', zbins = None, simInd = 100, BetaPrior = (-3, 3), KPrior = (0.0, 50.0), priorRate = None, priorZEff = None, ratePriorErrUp = None, ratePriorErrDown =None, ratePriorErrAll = None, TrueNCCData = None, f_1 = 1.0, f_2 = 1.0, f_3 = 1.0, f_4 = 1.0, f_5 = 1.0, f_6 = 1.0, f_7 = 1.0, f_8 = 1.0, f_9 = 1.0, f_10 = 1.0, f_11 = 1.0):
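'''
Binned chi2 between the CC-subtracted data and the rate-weighted simulation.
For each fit bin i the model prediction and variance are
model_i = sum_j effmat[i][j] * nSim[j] * f_J[j] * fnorm
var_i = max(nData_i, 1) + max(nCCData_i, 1) + sum_j effmat[i][j] * nSim[j] * (f_J[j]*fnorm)**2
chi2_i = (nData_i - nCCData_i - model_i)**2 / var_i
where f_J is k*(1+z)**Beta for the powerlaw models or the free per-bin factors
f_1..f_11 for the discrete model, and nCCData is the estimated core-collapse
contamination subtracted from the data. Weak priors on k and Beta and an
optional external rate prior are added to the summed chi2.
'''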
if simInd < self.nprint:
print "PRIORS2"
print priorRate
print priorZEff
print ratePriorErrUp
print ratePriorErrDown
Chi2Temp = 0.0
if Rate_Model == 'powerlaw':
f_Js = k*(1+zCentersSamp)**Beta
elif Rate_Model == 'discrete':
f_Js = np.array([f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11])
elif (Rate_Model == 'brokenpowerlaw') | (Rate_Model == 'brokenpowerlawVar'):
f_Js = []
#zCenters = (zbins[1:]+zbins[:-1])/2.0
temp = None
for zC in zCentersSamp:
if zC < zBreak:
f_Js.append(k*(1+zC)**Beta)
elif not(temp is None):
f_Js.append(temp)
else:
temp = f_Js[-1]
f_Js.append(temp)
f_Js = np.array(f_Js)
else:
assert(0)
if simInd < self.nprint:
if Rate_Model == 'discrete':
print "f_Js init"
print f_Js
else:
print "Beta init"
print Beta
print "k init"
print k
#chi2Mat = np.zeros((self.nbinsFit))
#adjNMC = np.zeros((self.nbinsFit))
if Rate_Model == 'discrete':
kprior = 0
betaprior = 0
else:
kprior = weakPrior(k, KPrior)
betaprior = weakPrior(Beta, BetaPrior)
if dump and (self.nprint > simInd):
print "kprior"
print kprior
print "betaprior"
print betaprior
if (nIA is None) or (nCC is None):
if dump:
print "No CC Cut"
fracCCData = np.zeros(nData.shape)
elif self.cheatCCSub:
fracCCData = TrueNCC*1.0/nData
else:
if Rate_Model == 'discrete':
if dump and (self.nprint > simInd):
print 'f_J adjusted CC Cut'
print Rate_Model
print nCC
print nIA
print np.array(f_Js)
fracCCData = (nCC*1.0)/((1.0*nCC + nIA*np.array(f_Js)))
print fracCCData
else:
if dump and (self.nprint > simInd):
print "Beta Adjusted CC Cut"
print Rate_Model
#BetaRatio = k*(1+zCenters)**(Beta)#/(1+zCenters)**MCBeta
BetaRatio = (1+zCentersFit)**(Beta)#/(1+zCenters)**MCBeta
if dump and (self.nprint > simInd):
print "Beta Ratio"
print BetaRatio
print "BadFracCCData"
print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
print "bad NCCData"
print (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))*nData
fracCCData = (nCC*1.0)/((1.0*nCC + nIA*BetaRatio))
if dump and (self.nprint > simInd):
print 'abc'
print "fracCCData2"
print fracCCData
print "unscaled fracCCData"
print (1.0*nCC)/(1.0*(nCC+nIA))
if self.cheatCCSub:
nCCData = TrueNCCData
if dump and (self.nprint < simInd):
print "Cheating CC Sub"
assert(not(TrueNCCData is None))
elif dump and (self.nprint > simInd):
print 'def'
print "Normal CC Sub"
if not self.cheatCCSub:
nCCData = nData*fracCCData
if dump and (self.nprint > simInd):
print "nCCData2"
print nCCData
if not(TrueNCCData is None):
print "TrueNCCData"
print TrueNCCData
#print f_Js
#Check if I am scaling errors down with increasing MC size. Make MC twice as large as "Data" to test.
if dump: chi2Storage = []
if dump: scaledNSimStor = []
if dump: JSumTempNumStor = []
if dump: JSumTempDenStor = []
if dump:
print "actually used NCC"
#print nCC
print nCCData
if dump and (simInd < self.nprint):
print "effmat"
print effmat
print "nData"
print nData
print "nCCData"
print nCCData
print "nSim"
print nSim
print nCCData
for row, nDataI, nCCDataI, i, zc in zip(effmat, nData, nCCData, range(self.nbinsFit), zCentersFit):
if dump and (self.nprint > simInd):
print 'effmat row'
print row
print 'nDataI'
print nDataI
print 'nCCDataI'
print nCCDataI
scaledNSimTemp = 0.0
JSumTempNum = 0.0
JSumTempDen = 0.0
if dump and (simInd < self.nprint):
print "nBinsSamp"
print self.nbinsSamp
assert(row.shape[0] == self.nbinsSamp)
assert(nSim.shape[0] == self.nbinsSamp)
assert(len(f_Js) == self.nbinsSamp)
for eff, nSimJ, f_J, j in zip(row, nSim, f_Js, range(self.nbinsSamp)):
if dump and (self.nprint > simInd):
print 'NGen J'
print nSimJ
print 'JSumTempNum contr'
print nSimJ*f_J*eff*fnorm
print 'JSumTempDen contr'
print nSimJ*f_J*eff*fnorm*f_J*fnorm
#if dump and (i != j) and self.cheatZ and (self.nprint < simInd):
# if nSimJ*f_J*eff*fnorm > 0:
# print " This should be zero but isnt "
# print nSimJ*f_J*eff*fnorm
# assert(0)
JSumTempNum += nSimJ*f_J*eff*fnorm
JSumTempDen += nSimJ*f_J*eff*fnorm*f_J*fnorm
dataFunc = np.maximum(nDataI ,1)
#CCFunc = np.ceil(np.maximum(nCCDataI, 1))
CCFunc = np.maximum(nCCDataI, 1)
c2t = (nDataI - nCCDataI - JSumTempNum)**2/( dataFunc + CCFunc + JSumTempDen)
if dump:
JSumTempNumStor.append(JSumTempNum)
JSumTempDenStor.append(JSumTempDen)
if dump and (self.nprint > simInd):
print i
print 'nDataI'
print nDataI
print 'fnCCDataI'
print nCCDataI
print 'fnorm'
print fnorm
print "JSumTempNum tot"
print JSumTempNum
print "JSumTempDen tot"
print JSumTempDen
print "Chi2Bin"
print c2t
if dump:
chi2Storage.append(c2t)
if c2t > 5:
print 'INSANITY CHECK ABOVE'
# Chi2Temp += ((nDataI - nCCDataI - JSumTempNum)**2/(JSumTempNum + JSumTempDen))#*fnorm**2
if nDataI > 1E-11 or JSumTempDen > 1E-11:
Chi2Temp += c2t
if dump and (self.nprint > simInd):
print "JSumTempNum/Den"
print JSumTempNumStor
print JSumTempDenStor
if dump:
if (self.nprint >simInd):
print Chi2Temp
print kprior
print betaprior
print chi2Storage
print "nData"
print nData
print "nCCData"
print nCCData
if priorRate is None:
return Chi2Temp+kprior+betaprior , chi2Storage
else:
print "PRIORS3"
print priorRate
print "fit k"
print k
print 'MCK'
print self.MCK
print "fit beta"
print Beta
print 'MCBeta'
print self.MCBeta
print ratePrior(k*self.MCK, Beta + self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
return Chi2Temp+kprior+betaprior + ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll), chi2Storage
else:
if dump and (self.nprint > simInd):
print 'C2T'
print Chi2Temp
print kprior
print betaprior
if priorRate is None:
return Chi2Temp+kprior+betaprior
else:
print "PRIORS3"
print priorRate
print "fit k"
print k
print 'MCK'
print self.MCK
print "fit beta"
print Beta
print 'MCBeta'
print self.MCBeta
print ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
return Chi2Temp+kprior+betaprior + ratePrior(k*self.MCK, Beta+self.MCBeta, priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown, ratePriorErrAll)
zCentersSamp = (self.binListSamp[1:] + self.binListSamp[:-1])/2.0
zCentersFit = (self.binListFit[1:] + self.binListFit[:-1])/2.0
#Is this right? Everything else in the other side of the chi2 function should be Ia only
if self.cheatCCSub:
self.fracCCData = TrueNCC*1.0/nData
else:
self.fracCCData = (NCC*1.0)/(1.0*(NCC + NIa))
if (self.nprint > simInd):
print "nSim"
print nSim
print 'fracCCData'
print self.fracCCData
print "nData"
print nData
#fnorm = float(np.sum(nData*(1-self.fracCCData)))/float(np.sum(nSim))
fnorm = 1.0/240.0
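# NOTE: fnorm is hard-coded to 1/240 (the commented-out line above computed it from the
# data/sim ratio instead); presumably the rate sim is ~240x the data volume, so this
# constant is analysis-specific and must be revisited if the simulation size changes.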
#print "PRIORS"
#print self.priorZEff
#print self.priorRate
#print self.ratePriorErrUp
#print self.ratePriorErrDown
if self.Rate_Model == 'powerlaw':
lamChi2 = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta)
c2i, _ = lamChi2Dump(1.0, 0.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'brokenpowerlaw':
lamChi2 = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlaw', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, 1.0, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlaw', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta)
c2i, _ = lamChi2Dump(1.0, 0.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'brokenpowerlawVar':
lamChi2 = lambda k, Beta, zBreak: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, zBreak, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlawVar', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
lamChi2Dump = lambda k, Beta, zBreak: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, k, Beta, zBreak, dump = True, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, Rate_Model = 'brokenpowerlawVar', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
MinObj = M(lamChi2, k = kInit, error_k = kErr , Beta = BetaInit, error_Beta = BetaErr, limit_k = (0.0, None), limit_Beta = (-100, 100), fix_k = fixK, fix_Beta = fixBeta, zBreak = 1.0, error_zBreak = 0.1, limit_zBreak = (self.zminFit, self.zmaxFit))
c2i, _ = lamChi2Dump(1.0, 0.0, 1.0)
print "Chi2 init = {0}".format(round(c2i, 4))
elif self.Rate_Model == 'discrete':
lamChi2 = lambda f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, f_1 = f_1, f_2 = f_2,f_3 = f_3, f_4 = f_4,f_5 = f_5, f_6 = f_6,f_7 = f_7, f_8 = f_8,f_9 = f_9, f_10 = f_10, f_11 = f_11, Rate_Model = 'discrete', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit )
lamChi2Dump = lambda f_1, f_2, f_3, f_4, f_5, f_6, f_7, f_8, f_9, f_10, f_11: chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, 1.0, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC, f_1 = f_1, f_2 = f_2,f_3 = f_3, f_4 = f_4,f_5 = f_5, f_6 = f_6,f_7 = f_7, f_8 = f_8,f_9 = f_9, f_10 = f_10, f_11 = f_11, dump = True, Rate_Model = 'discrete', priorRate = self.priorRate, priorZEff = self.priorZEff, ratePriorErrUp = self.ratePriorErrUp, ratePriorErrDown =self.ratePriorErrDown, ratePriorErrAll = self.ratePriorErrAll)#, zbins = self.binListFit)
c2i, _ = lamChi2Dump(1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0)
print "Chi2 init = {0}".format(round(c2i, 4))
MinObj = M(lamChi2, f_1 = 1.0, error_f_1 = 1.0, limit_f_1 = (0.0, None), f_2 = 1.0, error_f_2 = 1.0, limit_f_2 = (0.0, None), f_3 = 1.0, error_f_3 = 1.0, limit_f_3 = (0.0, None), f_4 = 1.0, error_f_4 = 1.0, limit_f_4 = (0.0, None), f_5 = 1.0, error_f_5 = 1.0, limit_f_5 = (0.0, None), f_6 = 1.0, error_f_6 = 1.0, limit_f_6 = (0.0, None), f_7 = 1.0, error_f_7 = 1.0, limit_f_7 = (0.0, None), f_8 = 1.0, error_f_8 = 1.0, limit_f_8 = (0.0, None), f_9 = 1.0, error_f_9 = 1.0, limit_f_9 = (0.0, None), f_10 = 1.0, error_f_10 = 1.0, limit_f_10 = (0.0, None), f_11 = 1.0,error_f_11 = 1.0, limit_f_11 = (0.0, None))
if self.Rate_Model == 'discrete':
c2f, c2stor = lamChi2Dump(MinObj.values['f_1'],MinObj.values['f_2'],MinObj.values['f_3'],MinObj.values['f_4'],MinObj.values['f_5'],MinObj.values['f_6'],MinObj.values['f_7'],MinObj.values['f_8'],MinObj.values['f_9'],MinObj.values['f_10'],MinObj.values['f_11'])
else:
print "TEST DUMP HERE"
if self.Rate_Model == 'brokenpowerlawVar':
c2f, c2stor = lamChi2Dump(MinObj.values['k'], MinObj.values['Beta'], MinObj.values['zBreak'])
else:
c2f, c2stor = lamChi2Dump(MinObj.values['k'], MinObj.values['Beta'])
#MinObj = M(lamChi2, k = 1.0, fix_k = True, Beta = 0.0, error_Beta = 0.1)
MinObj.set_strategy(2)
fmin, param = MinObj.migrad(nsplit= 10)
#fmin, param = MinObj.migrad()
#ErrDict = MinObj.minos()
self.covar = MinObj.np_covariance()
ErrDict = MinObj.minos(maxcall = 1000)
#plt.scatter(nData, c2stor)
#plt.xlabel('nData')
#plt.ylabel('chi2 in bin')
#plt.savefig(self.realName + 'Chi2VsnData.png')
#plt.clf()
if self.nprint > simInd:
print "Shapes of things"
print len(c2stor)
print nData.shape
print dataBins.shape
print self.binListFit.shape
print self.binListSamp.shape
#print DebugNIaPhot.shape
#print DebugNCCPhot.shape
#print DebugNIaTrue.shape
#print DebugNCCTrue.shape
for c2, nd in zip(c2stor, nData):
self.globalChi2Storage.append(c2)
self.globalNDataStorage.append(nd)
if self.Rate_Model == 'discrete':
fJList = [MinObj.values['f_1'],MinObj.values['f_2'],MinObj.values['f_3'],MinObj.values['f_4'],MinObj.values['f_5'],MinObj.values['f_6'],MinObj.values['f_7'],MinObj.values['f_8'],MinObj.values['f_9'],MinObj.values['f_10'],MinObj.values['f_11']]
fJErrList = [MinObj.errors['f_1'],MinObj.errors['f_2'],MinObj.errors['f_3'],MinObj.errors['f_4'],MinObj.errors['f_5'],MinObj.errors['f_6'],MinObj.errors['f_7'],MinObj.errors['f_8'],MinObj.errors['f_9'],MinObj.errors['f_10'],MinObj.errors['f_11']]
self.fJList = fJList
self.fJErrList = fJErrList
self.Beta = None
self.k = None
self.kErr = None
self.BetaErr = None
print fJList
print fJErrList
else:
k = MinObj.values['k']
#kErr = MinObj.errors['k']
kErr = (np.abs(ErrDict['k']['lower']) + np.abs(ErrDict['k']['upper']))/2.0
Beta = MinObj.values['Beta']
#BetaErr = MinObj.errors['Beta']
BetaErr = (np.abs(ErrDict['Beta']['lower']) + np.abs(ErrDict['Beta']['upper']))/2.0
if self.Rate_Model == 'brokenpowerlawVar':
self.zBreak = MinObj.values['zBreak']
self.zBreakErr = MinObj.errors['zBreak']
self.k = k
self.Beta = Beta
self.kErr = kErr
self.BetaErr = BetaErr
#/(self.nbins - 2)
self.BetaRatio = (1+zCentersFit)**(Beta)
self.fJList = None
print 'SCALE DEBUG'
print NCC
print NIa
print self.BetaRatio
print 'SCALE DEBUG2'
print np.sum(NCC)
print np.sum(NIa)
print np.sum(NIa*self.BetaRatio)
self.fracCCData = (NCC*1.0)/(1.0*(1.0*NCC + NIa*self.BetaRatio))
self.fracCCDataTot = (np.sum(NCC)*1.0)/(1.0*(1.0*np.sum(NCC) + np.sum(NIa*self.BetaRatio)))
print 'SCALE DEBUG3'
print self.fracCCData
print self.fracCCDataTot
print 'SCALE DEBUG4'
print OrigNCC
print np.sum(OrigNCC)
print CCScale
#print self.fracCCDataTot
#print type(self.fracCCDataTot)
#assert(type(self.fracCCDataTot) == float)
print "Chi2 final = {0}".format(round(lamChi2Dump(self.k, self.Beta)[0], 4))
self.chi2 = fmin.fval
print "Chi2final? = {0}".format(round(fmin.fval, 4))
if not(self.priorRate is None):
ratePriorFinalVal = ratePrior(self.k*self.MCK, self.Beta+self.MCBeta, self.priorRate, self.priorZEff, self.ratePriorErrUp, self.ratePriorErrDown, self.ratePriorErrAll )
c2NoPrior = chi2func(nData, nSim, self.effmat, fnorm, zCentersSamp, zCentersFit, self.k, self.Beta, dump = False, nIA = NIa, nCC = NCC, simInd =simInd, TrueNCCData = TrueNCC)
print "RATE PRIOR FINAL"
print ratePriorFinalVal
print "Chi2final? = {0}".format(round(fmin.fval, 4))
print "Chi2FinalNoPrior"
print c2NoPrior
#fJs = np.ones(zCenters.shape)
'''
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_contour('k','Beta', nsigma=3)
plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
locs, labels = plt.yticks()
labels = locs + np.cos(cosVal)
plt.yticks(labels)
plt.clf()
#xgrid,ygrid, sigma, rawdata = MinObj.mncontour_grid('k', 'Beta', numpoints=30, sigma_res = 1, nsigma = 2.0)
#fig, ax = plt.subplots(1)
#plt.clf()
#CS = ax.contour(xgrid, ygrid + self.MCBeta, sigma, levels = [ 1.0, 2.0])
#ax.clabel(CS, fontsize=7, inline=1)
#ax.set_xlabel('k')
#ax.set_ylabel('Beta')
#if Blind:
# ax.set_xticklabels([])
# ax.set_yticklabels([])
#plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
#plt.close()
except:
print "Plot Fail A"
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_profile('Beta', text = False)
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_contour.png'.format(self.realName, self.simName))
plt.clf()
except:
print "Plot Fail C"
try:
if Rate_Model != 'discrete':
Betas = np.linspace(self.Beta - 0.5, self.Beta + 0.5, 51)
FCNs = []
for bTemp in Betas:
FCN = lamChi2( self.k, bTemp)
FCNs.append(FCN)
plt.plot(Betas, FCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('Beta')
plt.ylabel('Chi2')
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_mycontour.png'.format(self.realName, self.simName))
plt.clf()
except:
print "Plot Fail D"
if Rate_Model != 'discrete':
plt.clf()
ax = plt.axes()
Betas = np.linspace(self.Beta - 0.1, self.Beta + 0.1, 501)
FCNs = []
for bTemp in Betas:
FCN = lamChi2( self.k, bTemp)
FCNs.append(FCN)
plt.plot(Betas, FCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('Beta')
plt.ylabel('Chi2')
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
ax.set_xticklabels(labels)
print "FCNs"
print FCNs
plt.savefig('{0}_{1}_beta_myzoomcontour.png'.format(self.realName, self.simName))
plt.clf()
plt.clf()
ax = plt.axes()
ks = np.linspace(self.k - 0.1, self.k + 0.1, 501)
kFCNs = []
for kTemp in ks:
FCN = lamChi2( kTemp, self.Beta)
kFCNs.append(FCN)
plt.plot(ks, kFCNs, c = 'k', label = 'Non Minuit Contour')
plt.legend()
plt.xlabel('k')
plt.ylabel('Chi2')
print "FCNs"
print kFCNs
plt.savefig('{0}_{1}_k_myzoomcontour.png'.format(self.realName, self.simName))
plt.clf()
# the slope diagnostic below uses the Beta-scan FCNs; the k scan stores kFCNs so it no longer overwrites them
df = np.array(FCNs[1:]) - np.array(FCNs[:-1])
inds = np.where(df > 0)[0]
print 'inds'
print inds
print inds < 250
print np.where(inds < 250)
inds = inds[np.where(inds < 250)]
print 'inds'
print inds
print "INDSSHAPE"
print inds.shape
if inds.shape[0]:
print "MINUIT IS PROBABLY MAD. HERES WHY"
print inds
print Betas[inds]
if inds.shape[0] > 1:
inds = inds[-1]
print inds
print Betas[inds]
lamChi2Dump(self.k, Betas[inds -3])
print "MINUIT MAD 2"
lamChi2Dump(self.k, Betas[inds -2])
print "MINUIT MAD 3"
lamChi2Dump(self.k, Betas[inds -1])
print "MINUIT MAD 4"
lamChi2Dump(self.k, Betas[inds])
print "MINUIT MAD 5"
lamChi2Dump(self.k, Betas[inds + 1])
print "MINUIT MAD 6"
lamChi2Dump(self.k, Betas[inds + 2])
print "MINUIT MAD 7"
lamChi2Dump(self.k, Betas[inds + 3])
print "END MINUIT MAD"
try:
if (Rate_Model != 'discrete'):
plt.clf()
MinObj.draw_mncontour('k','Beta', nsigma=3)
plt.savefig('{0}_{1}_k_beta_mncontour.png'.format(self.realName, self.simName))
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
locs, labels = plt.yticks()
labels = locs + np.cos(cosVal)
plt.yticks(labels)
plt.clf()
MinObj.draw_mnprofile('Beta', text = False, subtract_min = True)
if Blind:
locs, labels = plt.xticks()
labels = locs + np.cos(cosVal)
plt.xticks(labels)
plt.savefig('{0}_{1}_beta_mncontour.png'.format(self.realName, self.simName))
plt.clf()
#xgrid,ygrid, sigma, rawdata = MinObj.mncontour_grid('k', 'Beta', numpoints=30, sigma_res = 1, nsigma = 2.0)
#fig, ax = plt.subplots(1)
#plt.clf()
#CS = ax.contour(xgrid, ygrid + self.MCBeta, sigma, levels = [ 1.0, 2.0])
#ax.clabel(CS, fontsize=7, inline=1)
#ax.set_xlabel('k')
#ax.set_ylabel('Beta')
#if Blind:
# ax.set_xticklabels([])
# ax.set_yticklabels([])
#plt.savefig('{0}_{1}_k_beta_contour.png'.format(self.realName, self.simName))
#plt.close()
except:
print "Plot Fail B"
pass
#plt.axhline(y = self.MCBeta, c = 'k', label = 'True Beta')
#plt.axhline(y = Beta + self.MCBeta, c = 'g', label= 'Best Fit Beta')
#plt.axvline(x = k, label = 'Best Fit k')
'''
'''
def chi2V2(self, fJs, fJErrs, zCenters, k, Beta):
fitfJs = k*(1+zCenters)**Beta
Chi2Temp = 0
for fJ, fitfJ, fJErr in zip(fJs, fitfJs, fJErrs):
Chi2Temp += (fJ - fitfJ)**2/(fJ + fJErr)
return Chi2Temp
'''
def weakPrior(value, priorTuple):
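'''
Flat prior inside (priorTuple[0], priorTuple[1]) with steep quartic walls outside.
For example, with priorTuple = (-3, 3):
weakPrior(0.0, (-3, 3)) -> 1
weakPrior(5.0, (-3, 3)) -> (5 - 3)**4 = 16
Note the in-range return is 1 rather than 0, so it adds a constant offset to the chi2.
'''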
if value < priorTuple[1]:
if value > priorTuple[0]:
return 1
else:
return (value - priorTuple[0])**4
else:
return (value - priorTuple[1])**4
def ratePrior(fitK, fitBeta, priorRate, zEffPrior, priorRateErrUp = None, priorRateErrDown = None, priorRateErrAll = None):
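'''
One-sided Gaussian chi2 prior on the absolute rate at an effective redshift:
fitRate = fitK*(1 + zEffPrior)**fitBeta
chi2 += (fitRate - priorRate)**2/err**2
with err = priorRateErrUp above the prior rate and priorRateErrDown below it,
falling back to priorRateErrAll when a one-sided error is not supplied.
'''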
print "PRIOR"
print priorRate
print zEffPrior
print priorRateErrUp
print priorRateErrDown
print "Fit Beta/k"
print fitBeta
print fitK
fitRate = fitK*(1+zEffPrior)**fitBeta
print 'Fit Rate'
print fitRate
print "PriorChi2"
if fitRate > priorRate:
if not (priorRateErrUp is None):
print (fitRate - priorRate)**2/priorRateErrUp**2
return (fitRate - priorRate)**2/priorRateErrUp**2
else:
print (fitRate - priorRate)**2/priorRateErrAll**2
return (fitRate - priorRate)**2/priorRateErrAll**2
else:
if not (priorRateErrDown is None):
print (fitRate - priorRate)**2/priorRateErrDown**2
return (fitRate - priorRate)**2/priorRateErrDown**2
else:
print (fitRate - priorRate)**2/priorRateErrAll**2
return (fitRate - priorRate)**2/priorRateErrAll**2
def getCCScale(simCat, dataCat, MURESWindow = (-1, 1), zbins = [0.0, 0.3, 0.6, 0.9, 1.2], Beta = None, binList = None, fracCCData = None, outfilePrefix = 'Test', Rate_Model = 'powerlaw', f_Js = None, returnHist = False, debug = False, simInd = 100, ztype = 'zPHOT'):
#import iminuit as iM
#from iminuit import Minuit as M
if debug:
print "Check this"
print Rate_Model
print f_Js
print Beta
print fracCCData
print "Done Checking"
CCScales = []
CCScaleErrs = []
simIaHists = []
simCCHists = []
dataHists = []
if not(f_Js is None):
f_Js = np.array(f_Js)
allSimCC = simCat[simCat['SIM_TYPE_INDEX'].astype(int) != 1]
allSimIa = simCat[simCat['SIM_TYPE_INDEX'].astype(int) == 1]
allData = np.copy(dataCat)
#fnorm2 = float(dataCat.shape[0])/float(np.sum(simHist))
simCat = simCat[(simCat['MURES'] < MURESWindow[0]) | (simCat['MURES'] > MURESWindow[1]) ]
dataCat = dataCat[(dataCat['MURES'] < MURESWindow[0]) | (dataCat['MURES'] > MURESWindow[1]) ]
for zl, zh in zip(zbins[:-1], zbins[1:]):
tempSim = simCat[(simCat[ztype] < zh) & (simCat[ztype] > zl)]
tempData = dataCat[(dataCat[ztype] < zh) & (dataCat[ztype] > zl)]
allSimCCZbin = allSimCC[(allSimCC[ztype] < zh) & (allSimCC[ztype] > zl)]
allSimIaZbin = allSimIa[(allSimIa[ztype] < zh) & (allSimIa[ztype] > zl)]
if debug:
print "all Sim CC Zbin/IaZbin"
print allSimCCZbin.shape[0]
print allSimIaZbin.shape[0]
allDataZbin = allData[(allData[ztype] < zh) & (allData[ztype] > zl)]
tempSimCC = tempSim[tempSim['SIM_TYPE_INDEX'] != 1]
tempSimIa = tempSim[tempSim['SIM_TYPE_INDEX'] == 1]
R = float(tempData.shape[0])/float(allDataZbin.shape[0])
if debug:
print "R"
print R
print "Hist CC, outlier and total"
print tempSim.shape[0]
print allSimCCZbin.shape[0]
print "pre Beta Correction allSimIa"
print tempData.shape[0]
print allSimIaZbin.shape[0]
if Rate_Model == 'discrete':
hist, bins = np.histogram(allSimIaZbin[ztype], bins = 11)
if debug:
print 'fJ shape'
print f_Js.shape
print f_Js
print hist
print bins
betaCorrAllSimIaZbin =np.sum(hist*f_Js)
else:
betaCorrAllSimIaZbin =np.sum((1+ allSimIaZbin[ztype])**Beta)
#S = float(np.array(R*histSAllIa) - np.array(tempSimIa.shape[0]))/float(np.array(tempSimCC.shape[0]) - np.array(R*histSAllCC))
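# The scale S is chosen so that the scaled sim reproduces the observed fraction R of
# data in the Hubble-residual outlier tails:
# (nIaTail + S*nCCTail)/(NIaAll + S*NCCAll) = R
# => S = (R*NIaAll - nIaTail)/(nCCTail - R*NCCAll)
# with NIaAll replaced by the beta-corrected sum betaCorrAllSimIaZbin computed above.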
try:
if debug:
print "Test S"
print R
print betaCorrAllSimIaZbin
print tempSimIa.shape[0]
print tempSimCC.shape[0]
print allSimCCZbin.shape
print 'EEE'
print np.array(R*betaCorrAllSimIaZbin)
print 'DDD'
print np.array(tempSimIa.shape[0])
print 'CCC'
print (np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
print "AAA"
print (np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
print "BBB"
#S = (np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
S = float(np.array(R*betaCorrAllSimIaZbin) - np.array(tempSimIa.shape[0]))/float(np.array(tempSimCC.shape[0]) - np.array(R*allSimCCZbin.shape[0]))
except:
S = np.nan
if debug:
print "S WTF"
print S
print "Uncertainty Related Bullshit"
'''
print "Delta R"
dR = np.sqrt(histD + histDAll)
print dR
num1 = np.sqrt(np.sqrt((dR/R)**2 + histSAllIa) + tempSimIa.shape[0])
num2 = np.sqrt(np.sqrt((dR/R)**2 + histSAllCC) + tempSimCC.shape[0])
den1 = (R*histSAllIa - tempSimIa.shape[0])
den2 = (tempSimCC.shape[0] - R*histSAllCC)
dS = np.sqrt((num1/den1)**2 + (num2/den2)**2)
'''
#ddnCC = np.sqrt(tempSimCC.shape[0])*(tempSimIa.shape[0] - histSAllIa*R)/(tempSimCC.shape[0] - R*histSAllCC)**2
#ddNCC = np.sqrt(histSAllCC)*R*(histSAllIa*R - tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)**2
#ddnIa = np.sqrt(tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)
#ddNIa = np.sqrt(histSAllIa)*R/(tempSimCC.shape[0] - R*histSAllCC)
ddnCC = np.sqrt(tempSimCC.shape[0])*(tempSimIa.shape[0] - allSimIaZbin.shape[0]*R)/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])**2
ddNCC = np.sqrt(allSimCCZbin.shape[0])*R*(allSimIaZbin.shape[0]*R - tempSimIa.shape[0])/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])**2
ddnIa = np.sqrt(tempSimIa.shape[0])/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])
ddNIa = np.sqrt(allSimIaZbin.shape[0])*R/(tempSimCC.shape[0] - R*allSimCCZbin.shape[0])
#ddR = (histSAllIa*tempSimCC.shape[0] - histSAllCC * tempSimIa.shape[0])/(tempSimCC.shape[0] - R*histSAllCC)**2
dS = np.sqrt(ddnCC**2 + ddNCC**2 + ddnIa**2 + ddNIa**2)# + ddR**2)
if debug:
print "ddnCC"
print ddnCC
print "ddNCC"
print ddNCC
print "ddnIa"
print ddnIa
print "ddNIa"
print ddNIa
#print "ddR"
#print ddR
print "Delta S"
print dS
#assert(S > 0)
if S < 0:
S = np.nan
if np.isnan(S):
print 'SCALE IS NAN'
if len(CCScales) > 0:
#CCScales.append(CCScales[-1])
CCScales.append(1.0)
else:
CCScales.append(1.0)
else:
CCScales.append(S)
if type(dS) == np.ndarray:
if np.isnan(dS[0]):
CCScaleErrs.append(1.0)
else:
CCScaleErrs.append(dS[0])
else:
if np.isnan(dS):
CCScaleErrs.append(1.0)
else:
CCScaleErrs.append(dS)
#if debug:
# print "CC PlotDebug"
# print (simBinsCC[1:] + simBinsCC[:-1])/2.0
# print simHistCC
# print CCScales[0]
# print dS
# print fnorm2
# print histD
# print (muresBins[1:] + muresBins[:-1])/2.0
#if simInd ==1:
# plt.step((simBinsCC[1:] + simBinsCC[:-1])/2.0, simHistCC*fnorm2, c = 'b', where = 'mid', label = 'prescaled Sim CC')
# plt.step((simBinsCC[1:] + simBinsCC[:-1])/2.0, CCScales[0]*simHistCC*fnorm2, c = 'g', where = 'post', label = 'postscaledSimCC')
# plt.step((muresBins[1:] + muresBins[:-1])/2.0, histD, c = 'r', where = 'mid', label = 'data')
# plt.legend()
# plt.savefig(outfilePrefix + 'ScaledHist.png')
# plt.clf()
if debug:
print "CCScaleErrs"
print CCScaleErrs
if returnHist:
return CCScales, CCScaleErrs, simIaHists, simCCHists, dataHists
return CCScales, CCScaleErrs
def applyCCScale(NCC, CCScales, CCScaleErrs, datazbins = None, zbins = None):
if not(zbins is None):
zbins = np.array(zbins)
if not (datazbins is None):
datazbins = np.array(datazbins)
if type(CCScaleErrs) == list:
CCScaleErrs = np.array(CCScaleErrs)
if type(CCScales) == list:
CCScales = np.array(CCScales)
print 'CCScaleErrs'
print CCScaleErrs
print datazbins
print zbins
if type(CCScales) == np.ndarray:
if CCScales.shape[0] == 1:
NCCScaled = CCScales[0]*NCC
else:
if (datazbins is None) | (zbins is None):
assert(0)
if CCScales.shape[0] < 4:
k = CCScales.shape[0] -1
else:
k = 3
nancond = np.isnan(CCScales)
if np.sum(nancond) > 0:
CCScales[nancond] = 1.
CCScaleErrs[nancond] = 1.
zCenters = (zbins[1:]+ zbins[:-1])/2.0
print zCenters
print CCScales
#spline = UnivariateSpline(zbins, CCScales, w = 1.0/CCScaleErrs, k = k)
spline = UnivariateSpline(zCenters, CCScales, w = 1.0/CCScaleErrs, k = k)
print datazbins.shape
print datazbins
print NCC.shape
datazcents = (datazbins[1:]+ datazbins[:-1])/2.0
NCCScaled = spline(datazcents)*NCC
elif (type(CCScales) == int) | (type(CCScales) == float):
NCCScaled = CCScales*NCC
else:
assert(0)
NCCScaled = NCCScaled.clip(0)
print NCCScaled
assert(not bool(np.sum(NCCScaled < 0)))
return NCCScaled
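# Minimal usage sketch (illustrative numbers only): a single scale multiplies the CC
# histogram directly,
# applyCCScale(np.array([10., 20.]), [1.5], [0.1]) -> array([15., 30.])
# while per-z-bin scales are interpolated with an error-weighted spline through the
# zbins centers and evaluated at the analysis bin centers, e.g.
# applyCCScale(NCC, CCScales, CCScaleErrs, zbins = [0., 0.4, 0.8, 1.2], datazbins = binListFit)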
if __name__ == '__main__':
from sys import argv
print "argv"
print argv
datadir = argv[1]
simdir = argv[2]
dataname = argv[3]
print "dataname"
simname = argv[4]
print simname
simgenfile = argv[5]
print simgenfile
NNCut = False
cheatType = bool(int(argv[6]))
cheatZ = bool(int(argv[7]))
trueBeta = float(argv[8])
paramFile = argv[9]
cutFiles = [argv[10]]
try:
debug = bool(int(argv[11]))
except:
debug = False
#if( ('Combine' in simdir) or ('SALT2' in simdir)) & (('Combine' in datadir) or ('SALT2' in simdir)):
#NNCut = True
#NNProbCut = 0.95
#if len(argv) > 6:
# NNCut = True
# NNProbCut = 0.9
# NNData = argv[6]
# NNSim = argv[7]
#default params
zminFit = 0.1
zmaxFit = 1.2
zminSamp = 0.1
zmaxSamp = 1.2
MJDMin = 0.0
MJDMax = np.inf
bins = "equalSize"
runFit = True
fracContamCuts = [-1]
fixBeta = True
fixK = False
nbins = None
binList = None
ScaleMuResCutLow = -1
ScaleMuResCutHigh = 1
#muresBins = 1
muresBinsLow = 3
muresBinsHigh = 3
scaleZBins = [0.0, 1.2]
nScaleZBins = None
cheatCCSub = False
cheatCCScale = False
ZSysFlag = False
Blind = False
Rate_Model = 'powerlaw'
MURESCuts = 2.0 #[(0.0, 0.8, -0.5, 0.5), (0.8, 1.5, -1, 1)]
noCCMC = False
fixCCScale = False
trueMCBeta = 1.65
trueMCK = 1.97E-5
priorRate = None
priorZEff = None
ratePriorErrUp = None
ratePriorErrDown =None
ratePriorErrAll = None
priors = None
#override file
params = open(paramFile, 'r').readlines()
for p in params:
print p
exec(p)
if nScaleZBins is None :
redoScaleZBinFlag = False
else:
redoScaleZBinFlag = True
if not(priors is None):
if len(priors) == 3:
priorRate, priorZEff, ratePriorErrAll = priors
ratePriorErrUp = None
ratePriorErrDown = None
elif len(priors) == 4:
priorRate, priorZEff, ratePriorErrUp, ratePriorErrDown = priors
ratePriorErrAll =None
cosVal = 47392945716038.134971247
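# Blinding scheme: np.cos(cosVal) is an effectively unpredictable constant in [-1, 1]
# that gets added to every reported Beta (and to plot tick labels) whenever Blind is
# set, so results can be compared between runs without revealing the true value.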
kmean = []
ksigma = []
kErr = []
BetaMean = []
#BetaWeightMean = []
#KWeightMean = []
BetaSigma= []
BetaErr = []
zBreakMeans = []
zBreakSigmas =[]
zBreakErrs = []
Chi2Mean = []
Chi2Sigma = []
f_JStorage = []
f_JErrStorage = []
SampleSizes = []
CCScaleStorageGlobal = []
CCScaleErrStorageGlobal = []
#MURES_Cuts = [2.0]
#MURES_Cuts = [1.0, 1.5, 2.0, 3.0, 4.0, 99.0, 2.0]
#for MURES_Cut in MURES_Cuts:
fcc = -1
for cf in cutFiles:
cuts = [] # cuts = [('FITPROB', 0.01, np.inf), ('NN_PROB_IA', NNProbCut, np.inf)]
cutlist = open(cf, 'r').readlines()
for l in cutlist:
spl = l.split()
cuts.append(('{0}'.format(spl[0]), float('{0}'.format(spl[1])), float('{0}'.format(spl[2]))))
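# Each line of a cut file is "<FITRES column> <min> <max>", one cut per line, e.g.
# FITPROB 0.01 inf
# NN_PROB_IA 0.95 inf
# ('inf' is a valid unbounded edge since float('inf') parses).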
ks = []
kErrs = []
Betas = []
BetaErrs = []
zBreaks =[]
zBreakErrs = []
Chi2s = []
CCScaleStorage = []
CCScaleErrStorage = []
nFail = 0
simLoaded = False
#print "FUCK MPI"
#if Rate_Model == 'discrete':
# subprocess.call(['python', 'constructChi2Func.py', str(nbins)], shell = False)
#print "MPI Fucked"
if '{' in datadir:
if os.path.exists(datadir.format(98)):
print "MOAR SIMS"
nfile = 101
else:
print "FEWAR SIMS"
nfile = 49
else:
nfile = 2
for simInd in range(1,nfile):
#print "Sim {0}".format(simInd)
#SimBeta = 2.1 # simdir.split('_')[-3]
#SimR0 = 1.7*10**-5 #simdir.split('_')[-5]
#print "Sim R0 = {1}; Sim Beta = {0}".format(SimBeta, SimR0)
print datadir.format(simInd)
if simLoaded:
try:
RateTest.newData(datadir.format(simInd), dataname.format(simInd), simInd =simInd)
if ZSysFlag:
assert(0)
RateTest.zSystematic(nbins = nbins, binList = binList)
if redoScaleZBinFlag:
RealCat = RateTest.postCutRealCat
# select Hubble-residual outliers with the MURES window thresholds; the original compared
# against the muresBins* bin counts, which selects nearly everything
RealOutlierCat = RealCat[(RealCat['MURES'] > ScaleMuResCutHigh) | (RealCat['MURES'] < ScaleMuResCutLow)]
zArray =RealOutlierCat[RateTest.ztype]
zArray.sort()
splitZs = np.array_split(zArray, nScaleZBins)
#[(0[0], (0[-1] + 1[0]), (1[-1] + 2[0]), 2[1]]
scaleZBins = [splitZs[0][0]]
for i in range(1,nScaleZBins):
scaleZBins.append((splitZs[i-1][-1] + splitZs[i][0] )/2.0)
scaleZBins.append(splitZs[i][-1])
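# scaleZBins now holds nScaleZBins + 1 equal-population bin edges built from the sorted
# outlier redshifts: the outer edges are the first and last sorted z, and each interior
# edge sits halfway between the last z of one chunk and the first z of the next.
# E.g. sorted z split into chunks [0.1, 0.3] and [0.5, 0.9] gives edges [0.1, 0.4, 0.9].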
#RateTest.effCalc(nbins = nbins, fracContamCut = fcc, simInd =simInd)
#RateTest.effCalc(nbins = 20)
BetaIter = []
BetaErrIter = []
CCIter = []
CCErrIter = []
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, simInd =simInd, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, TrueCCScale = TrueCCScale, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding A"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
for iteration in range(nIter):
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname,Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = CCScale, CCScaleErr = CCScaleErr, TrueCCScale = TrueCCScale, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(0.0)
CCErrIter.append(0.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 0.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(1.0)
CCErrIter.append(0.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, f_Js =RateTest.fJList, CCZbins = scaleZBins , scaleZBins = scaleZBins, Blind = Blind)
if Blind:
print "Blinding b"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname,Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
else:
CCIter.append(1.0)
CCErrIter.append(0.0)
print "CCScale Progression"
print CCIter
print "CCScale Err Progression"
print CCErrIter
if Rate_Model != 'discrete':
print "Beta Progression"
print BetaIter
print "Beta Err Progressions"
print BetaErrIter
print "Mean Betas"
print np.nanmean(BetaIter)
print "Mean CCScales"
print np.nanmean(CCIter)
else:
f_JStorage.append(RateTest.fJList)
f_JErrStorage.append(RateTest.fJErrList)
#print "AAA CC Scales"
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
print CCScale
CCScaleStorage.append(CCScale)
CCScaleErrStorage.append(CCScaleErr)
else:
CCScaleStorage.append(0.0)
CCScaleErrStorage.append(1.0)
else:
CCScaleStorage.append(1.0)
CCScaleErrStorage.append(1.0)
ks.append(RateTest.k)
kErrs.append(RateTest.kErr)
if Blind:
print "Blinding c"
Betas.append(RateTest.Beta+ np.cos(cosVal))
else:
Betas.append(RateTest.Beta)
BetaErrs.append(RateTest.BetaErr)
if Rate_Model == 'brokenpowerlawVar':
zBreaks.append(RateTest.zBreak)
zBreakErrs.append(RateTest.zBreakErr)
Chi2s.append(RateTest.chi2)
print "CCScale Storage Iter {0}".format(simInd)
print CCScaleStorage
if not noCCMC:
print CCScale
print CCScale[0]
dnamestr = datadir.format(simInd)
cutdnamestr = dnamestr.split('.')[0] + '+CUTS.FITRES.gz'
#if saveCuts:
# np.savetxt(cutdnamestr, RateTest.realcat.Catalog, delimiter = ' ', fmt='%s')
lowzCut = zminFit
highzCut = zmaxFit
SampleSizes.append( RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)].shape[0])
if saveCuts:
np.savetxt(cutdnamestr, RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)], delimiter = ' ', fmt='%s')
#with open(cutdnamestr, 'rb') as f_in:
# with gzip.open(cutdnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
except Exception, e:
print "FAILURE"
print e
traceback.print_exc()
nFail +=1
else:
try:
RateTest = Rate_Fitter(datadir.format(simInd), dataname.format(simInd), simdir, simname,simgenfile, trueMCBeta, trueMCK, zminSamp =zminSamp, zmaxSamp =zmaxSamp, zminFit =zminFit, zmaxFit =zmaxFit, cheatZ = cheatZ, cheatType = cheatType, cuts = cuts, cheatCCSub = cheatCCSub, cheatCCScale = cheatCCScale, Rate_Model = Rate_Model, MURESCuts = MURESCuts, noCCMC = noCCMC, priorRate = priorRate, priorZEff = priorZEff, ratePriorErrUp = ratePriorErrUp, ratePriorErrDown =ratePriorErrDown, ratePriorErrAll = ratePriorErrAll)# , MJDMin = 0, MJDMax = np.inf)
if ZSysFlag:
RateTest.zSystematic(nbins = nbins, binList = binList)
simLoaded = True
RateTest.effCalc(nbinsSamp = nbinsSamp,nbinsFit = nbinsFit, fracContamCut = fcc)
#RateTest.effCalc(nbins = 20)
BetaIter = []
BetaErrIter = []
CCIter = []
CCErrIter = []
if redoScaleZBinFlag:
RealCat = RateTest.postCutRealCat
RealOutlierCat = RealCat[(RealCat['MURES'] > ScaleMuResCutHigh) | (RealCat['MURES'] < ScaleMuResCutLow)]
zArray =RealOutlierCat[RateTest.ztype]
zArray.sort()
print 'zArray'
print zArray
print 'nScaleZBins'
print nScaleZBins
splitZs = np.array_split(zArray, nScaleZBins)
#[(0[0], (0[-1] + 1[0]), (1[-1] + 2[0]), 2[1]]
scaleZBins = [splitZs[0][0]]
for i in range(1,nScaleZBins):
scaleZBins.append((splitZs[i-1][-1] + splitZs[i][0] )/2.0)
scaleZBins.append(splitZs[i][-1])
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, simInd =simInd, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, TrueCCScale = TrueCCScale, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding d"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
for iteration in range(nIter):
print "interation Number"
print iteration
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = CCScale, CCScaleErr = CCScaleErr, TrueCCScale = TrueCCScale, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(0.0)
CCErrIter.append(1.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 0.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
else:
CCIter.append(1.0)
CCErrIter.append(1.0)
RateTest.fit_rate(fixK = fixK, fixBeta = fixBeta, trueBeta = trueBeta - trueMCBeta, CCScale = 1.0, CCScaleErr = 1.0, TrueCCScale = 0.0, BetaInit = RateTest.Beta, kInit = RateTest.k, BetaErr = RateTest.BetaErr, kErr = RateTest.kErr, CCZbins = scaleZBins, scaleZBins = scaleZBins, Blind = Blind)
if Rate_Model != 'discrete':
if Blind:
print "Blinding e"
BetaIter.append(RateTest.Beta+ np.cos(cosVal))
else:
BetaIter.append(RateTest.Beta)
BetaErrIter.append(RateTest.BetaErr)
if not fixCCScale:
if not noCCMC:
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, Rate_Model = Rate_Model, f_Js =RateTest.fJList, simInd = simInd, debug = debug, ztype = RateTest.ztype)
CCIter.append(CCScale)
CCErrIter.append(CCScaleErr)
if Rate_Model != 'discrete':
print "Beta Progression"
print BetaIter
print "Beta Err Progressions"
print BetaErrIter
print "Mean Betas"
print np.nanmean(BetaIter)
else:
f_JStorage.append(RateTest.fJList)
f_JErrStorage.append(RateTest.fJErrList)
print "CCScale Progression"
print CCIter
print "CCScale Err Progression"
print CCErrIter
print "Mean CCScales"
print np.nanmean(CCIter)
if not fixCCScale:
if not noCCMC:
print "AAA CC Scales"
CCScale, CCScaleErr = getCCScale(RateTest.postCutSimCat, RateTest.postCutRealCat, MURESWindow = (ScaleMuResCutLow, ScaleMuResCutHigh), zbins = scaleZBins, Beta = RateTest.Beta, binList = RateTest.binListFit, fracCCData = RateTest.fracCCData, outfilePrefix = dataname, f_Js =RateTest.fJList, Rate_Model = Rate_Model, simInd = simInd, debug = debug, ztype = RateTest.ztype)
print 'CC Scale'
print CCScale
CCScaleStorage.append(CCScale)
CCScaleErrStorage.append(CCScaleErr)
else:
CCScaleStorage.append(0.0)
CCScaleErrStorage.append(1.0)
else:
CCScaleStorage.append(1.0)
CCScaleErrStorage.append(1.0)
dnamestr = datadir.format(simInd)
cutdnamestr = dnamestr.split('.')[0] + '+CUTS.FITRES.gz'
np.savetxt(cutdnamestr, RateTest.realcat.Catalog, delimiter = ' ', fmt='%s')
#with open(cutdnamestr, 'rb') as f_in:
# with gzip.open(cutdnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
cutsnamestr = simname.split('.')[0] + '+CUTS.FITRES.gz'
np.savetxt(cutsnamestr, RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)], delimiter = ' ', fmt = '%s')
lowzCut = zminFit
highzCut = zmaxFit
SampleSizes.append( RateTest.realcat.Catalog[(RateTest.realcat.Catalog[RateTest.ztype] < zmaxFit) & (RateTest.realcat.Catalog[RateTest.ztype] > zminFit)].shape[0])
#with open(cutsnamestr, 'rb') as f_in:
# with gzip.open(cutsnamestr + '.gz', 'wb') as f_out:
# shutil.copyfileobj(f_in, f_out)
ks.append(RateTest.k)
kErrs.append(RateTest.kErr)
if Rate_Model != 'discrete':
if Blind:
print "Blinding f"
Betas.append(RateTest.Beta+ np.cos(cosVal))
else:
Betas.append(RateTest.Beta)
BetaErrs.append(RateTest.BetaErr)
if Rate_Model == 'brokenpowerlawVar':
zBreaks.append(RateTest.zBreak)
zBreakErrs.append(RateTest.zBreakErr)
Chi2s.append(RateTest.chi2)
print "CCScale Storage Iter {0}".format(simInd)
print CCScaleStorage
if not noCCMC:
print CCScale
print CCScale[0]
if Rate_Model != 'discrete':
if np.isnan(RateTest.Beta):
nFail +=1
except Exception, e:
print "FAILURE"
print e
traceback.print_exc()
nFail +=1
#if Blind:
# Betas = np.array(Betas) + np.cos(47392945716038.134971247)
print "Number of Failures"
print nFail
if Rate_Model != 'discrete':
# convert to arrays first: with plain python lists the (errs > 0) comparisons are not elementwise in python 2
Betas = np.array(Betas); BetaErrs = np.array(BetaErrs); ks = np.array(ks); kErrs = np.array(kErrs)
badSims = np.invert(np.isfinite(Betas) & (BetaErrs > 0) & np.isfinite(ks) & (kErrs > 0))
mBetas = ma.masked_array(Betas, mask=badSims)
mBetaErrs = ma.masked_array(BetaErrs, mask=badSims)
mks = ma.masked_array(ks, mask=badSims)
mkErrs = ma.masked_array(kErrs, mask=badSims)
print "mean k"
print np.nanmean(ks)
print "mean kerrs"
print np.nanmean(kErrs)
print "std. k"
print np.nanstd(ks)
print "Mean beta"
print np.nanmean(Betas)
print "Mean betaerrs"
print np.nanmean(BetaErrs)
print "std. beta"
print np.nanstd(Betas)
if len(Betas) == 1:
kmean.append(ks[0])
ksigma.append(0.0)
kErr.append(kErrs[0])
BetaMean.append(Betas[0])
BetaSigma.append(0.0)
BetaErr.append(BetaErrs[0])
else:
print "test here"
print ks
print mks
print Betas
print mBetas
print 'end test here'
kmean.append(np.average(mks, weights = 1.0/mkErrs**2))
ksigma.append(np.std(mks))
kErr.append(np.mean(mkErrs))
BetaMean.append(np.average(mBetas, weights = 1.0/mBetaErrs**2))
#BetaWeightMean.append(np.average(Betas, weights = 1.0/ma.masked_invalid(BetaErrs)**2))
#KWeightMean.append(np.average(ks, weights = 1.0/ma.masked_invalid(kErrs)**2))
BetaSigma.append(np.std(mBetas))
BetaErr.append(np.mean(mBetaErrs))
else:
print "mean f_Js"
print np.nanmean(f_JStorage, axis =0)
print "mean f_JErrs"
print np.nanmean(f_JErrStorage, axis =0)
if Rate_Model == 'brokenpowerlawVar':
zBreakMeans.append(np.nanmean(zBreaks))
zBreakSigmas.append(np.nanstd(zBreaks))
Chi2Mean.append(np.nanmean(Chi2s))
Chi2Sigma.append(np.nanstd(Chi2s))
#if simInd == 1:
print "Indiv Chi2s"
print Chi2s
bins0 = np.linspace(1.0, 20.0, 10)
hist, bins = np.histogram(Chi2s, bins = bins0)
xs = (bins[1:] + bins[:-1])/2.0
plt.bar(xs, hist, width = bins[1:] - bins[:-1])
print "Chi2 Hist"
print bins
print hist
chi2s = scipy.stats.chi2.pdf(xs, nbinsFit - 2)
norm = np.max(hist)*1.0/np.max(chi2s)
plt.plot(xs, chi2s*norm, color = 'g')
if cheatType and not cheatZ:
plt.savefig(dataname +'Chi2Plot_CheatType.png')
elif cheatZ and not cheatType:
plt.savefig(dataname +'Chi2Plot_CheatZ.png')
elif cheatZ and cheatType:
plt.savefig(dataname +'Chi2Plot_CheatTypeZ.png')
else:
plt.savefig(dataname +'Chi2Plot.png')
if not noCCMC:
print "AAA CC Scale means (weighted, unweighted)"
#print np.average(ma.masked_invalid(np.array(CCScaleStorage)),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
#print np.nanmean(ma.masked_invalid(np.array(CCScaleStorage)), axis = 0)
#print CCScaleStorage
#print CCScaleErrStorage
print np.average(np.array(CCScaleStorage),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
print np.nanmean(np.array(CCScaleStorage), axis = 0)
print "AAA CC Scale stds"
print np.nanstd(np.array(CCScaleStorage), axis = 0)
CCScaleStorageGlobal.append(CCScaleStorage)
print "All Betas"
print Betas
if cheatType:
print "THESE RESULTS ONLY INCLUDE TRUE Ias BECAUSE WE CHEATED AND USED THE SIM INFORMATION"
if cheatZ:
print "THESE RESULTS Use Simulated Redshift info"
'''
print "lengths of lists"
print len(RateTest.globalNDataStorage)
print len(RateTest.globalChi2Storage)
print len(RateTest.globalZPhotBinStorage)
print len(RateTest.globalNDataIaPhotBinStorage)
plt.clf()
plt.scatter(RateTest.globalNDataStorage, RateTest.globalChi2Storage)
plt.xlabel('nData')
plt.ylabel('chi2 in bin')
string = ''
if cheatType: string += 'CheatType'
if cheatZ: string += 'CheatZ'
print 'string here'
print string
plt.savefig(RateTest.realName + 'Chi2VsnData' + string +'.png')
plt.clf()
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalChi2Storage)
plt.xlabel('zPhot bin center')
plt.ylabel('chi2 in bin')
plt.savefig(RateTest.realName + 'Chi2VsZPhot' + string +'.png')
plt.clf()
plt.clf()
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalNDataIaPhotBinStorage, s = 1, c = 'r', label = 'Type Ia Data, zPhot')
plt.scatter(RateTest.globalZPhotBinStorage, RateTest.globalNDataCCPhotBinStorage, s = 1, c = 'b', label = 'CC Data, zPhot')
plt.scatter(RateTest.globalZTrueBinStorage, RateTest.globalNDataIaTrueBinStorage, s = 1, c = 'Pink', label = 'Type Ia Data, zTrue')
plt.scatter(RateTest.globalZTrueBinStorage, RateTest.globalNDataCCTrueBinStorage, s = 1, c = 'Cyan', label = 'CC Data, zTrue')
plt.yscale('log')
plt.xlabel('redshift either true or phot')
plt.legend()
plt.savefig(RateTest.realName + 'AggregateZDistro' + string +'.png')
'''
#print "MURES CUTS"
#print MURES_Cuts
print "Frac Contam Cuts"
print fracContamCuts
if Rate_Model != 'discrete':
print "Kmeans"
print kmean
print "Ksigmas"
print ksigma
print "BetaMeans"
print BetaMean
print "BetaSigmas"
print BetaSigma
print "BetaErrs"
print BetaErr
else:
print "f_J mean unweighted"
print np.mean(f_JStorage, axis = 0)
print "f_J mean weighted"
print np.average(f_JStorage, weights = 1.0/(np.array(f_JErrStorage))**2, axis = 0)
print "f_J Errors"
print np.mean(f_JErrStorage, axis = 0)
if Rate_Model == 'brokenpowerlawVar':
print "mean powerlaw break z"
print zBreakMeans
print "st. dev powerlaw break z"
print zBreakSigmas
print "Chi2Means"
print Chi2Mean
print "Chi2Sigma"
print Chi2Sigma
assert(fracContamCuts[0] == -1)
outfile = dataname
if Rate_Model != 'discrete':
print "outfile Pre Prefix"
print outfile
if cheatType:
outfile = outfile + '_CheatType'
if cheatZ:
outfile = outfile + 'Z'
elif cheatZ:
outfile = outfile + '_CheatZ'
outfile1 = outfile + '.txt'
outfile2 = outfile + '-IndivBetaK.txt'
output2 = open(outfile2, 'w')
output2.write('i Beta_i k_i BetaErr_i kErr_i\n')
for i, b, k, berr, kerr in zip(range(len(Betas)),Betas, ks, BetaErrs, kErrs):
output2.write('{0} {1:.4f} {2:.4f} {3:.4f} {4:.4f}\n'.format(i, b, k, berr, kerr))
output2.close()
print "Outfile Name"
if not(os.path.isfile(outfile1)):
output = open(outfile1, 'w')
output.write('#Date Date/time at which job finished\n')
output.write('#DataBeta Input beta for the simulated data sample. Will be 0.0 for real data.\n')
output.write('#N_sims Number of datalike sims that go into the subsequent means\n')
output.write('#SampleSize Mean Number of Events in data post cut\n')
        output.write('#delta_Beta mean difference between large MC sim beta (2.11 for the time being) and the measured beta for the data (not the beta in column 2).\n')
output.write('#sigma_Beta stdev of delta_Beta over N_sims sims\n')
output.write('#BetaStdErr std. error in the mean of delta_Beta over N_sims sims\n')
        output.write('#BetaStatErr mean statistical error on beta\n')
output.write('#K mean ratio between large MC sim K (1.7E-5 for the time being) and the measured K for the data \n')
output.write('#sigma_K stdev of K over N_sims sims\n')
output.write('#KStdErr std. error in the mean of K over N_sims sims\n')
        output.write('#KStatErr mean statistical error on K\n')
output.write('#meanZ mean photoZ of the large MC sim\n')
output.write('#sigmaZ std. deviation of the photoZs for the large Sim\n')
        output.write('#sigmaDz std. deviation of (zSim - zPHOT)\n')
output.write('#NCC/NTotScaled overall CC Contamination after adjusting CC Frac to data\n')
output.write('#NCC/NTot overall CC Contamination in sim only\n')
output.write('#CCScales relative sim vs. CC rate in z-bins \n')
output.write('#TypeChoice Internal Diagnostic, check code comments\n')
output.write('#NNProbCut Threshold for NN probability of Ia\n')
output.write('#NBins Number of Analysis Bins\n')
output.write('#MRSLow Threshold for Neg Mures Outliers\n')
output.write('#MRSHigh Threshold for Pos Mures Outliers\n')
output.write('#FitprobCut Lowest Fitprob in sim\n')
output.write('#MRSCut NSigma Hubble residual cut\n')
output.write('#Chi2 minimum value of Chi2 function\n')
output.write('#Correlation cov[0,1]/np.sqrt(cov[0,0]*cov[1,1])\n')
output.write('#Date \t\tDataBeta N_sims SampleSize delta_Beta sigma_Beta BetaStdErr BetaStatErr K sigma_K KStdErr KStatErr meanZ sigmaZ sigmaDz NCC/NTotScaled NCC/NTot CCScales TypeChoice NNProbCut NBins MRSLow MRSHigh FitprobCut MRSCut Chi2 Correlation\n')
else:
output = open(outfile1, 'a')
print 'outfile'
print outfile
cat = RateTest.simcat.Catalog
t = time.strftime('%b-%d-%H:%M')
N_Sims = np.sum(np.invert(np.isnan(ks)))
SigBeta = float(BetaSigma[0])
SigK = float(ksigma[0])
kStdErr = float(ksigma[0])/np.sqrt(N_Sims)
BetaStdErr = float(BetaSigma[0])/np.sqrt(N_Sims)
meanZ = np.nanmean(cat[RateTest.ztype])
sigZ = np.nanstd(cat[RateTest.ztype])
sigDZ = np.nanstd(cat[RateTest.ztype] - cat['SIM_ZCMB'])
lowzCut = zminFit
highzCut = zmaxFit
contam2 = np.sum(cat[(cat[RateTest.ztype] > lowzCut) & (cat[RateTest.ztype] < highzCut)]['SIM_TYPE_INDEX'] !=1).astype(float)/ float(cat[(cat[RateTest.ztype] > lowzCut) & (cat[RateTest.ztype] < highzCut)].shape[0])
contam = RateTest.fracCCDataTot
ccscales = np.average(np.array(CCScaleStorage),weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2, axis = 0)
cov = RateTest.covar
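    # Pearson correlation between the two fitted parameters (k and Beta), from the
    # off-diagonal of the 2x2 fit covariance matrix.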
correlation = cov[0, 1] / np.sqrt(cov[0, 0] * cov[1, 1])
print "Outfile debug"
print t
print trueBeta
print N_Sims
print BetaMean[0]
print BetaStdErr
print BetaErrs[0]
print meanZ
print sigZ
print sigDZ
print contam
print RateTest.typeString
print RateTest.postCutSimCat['NN_PROB_IA'].min()
print SigBeta
print kmean[0]
print kErrs[0]
print kStdErr
print SigK
print np.nanmean(SampleSizes)
print int(nbinsFit)
print ScaleMuResCutLow
print ScaleMuResCutHigh
print RateTest.postCutSimCat['FITPROB'].min()
print MURESCuts
print np.mean(Chi2Mean)
print contam2
print ccscales
print correlation
ccscales = ','.join(str(ccscales).split())
    # Columns follow the header written above: sigma_K gets the spread (SigK) and
    # KStatErr the per-fit statistical error (kErrs[0]), mirroring the Beta columns.
    output.write('{0}\t\t{1:.2f}\t{2}\t{17:.3f}\t{3:.3f}\t{12:.3f}\t{4:.3f}\t{5:.3f}\t{13:.3f}\t{16:.3f}\t{15:.3f}\t{14:.3f}\t{6:.3f}\t{7:.3f}\t{8:.3f}\t{9:.3f}\t{24:.3f}\t{25}\t{10}\t{11:.3f}\t{18:d}\t{19:.3f}\t{20:.3f}\t{21:.3f}\t{22:.2f}\t{23:.3f}\t{26:.3f}\n'.format(t, trueBeta, N_Sims, BetaMean[0], BetaStdErr, BetaErrs[0],meanZ, sigZ, sigDZ, contam, RateTest.typeString, RateTest.postCutSimCat['NN_PROB_IA'].min(), SigBeta, kmean[0], kErrs[0], kStdErr, SigK, np.nanmean(SampleSizes), int(nbinsFit), ScaleMuResCutLow, ScaleMuResCutHigh, RateTest.postCutSimCat['FITPROB'].min(), MURESCuts, np.mean(Chi2Mean), contam2, ccscales, correlation) )
print "BetaMean[0]"
print BetaMean[0]
print BetaMean
print "KMean[0]"
print kmean[0]
print kmean
print "Correlation"
print correlation
#print "BetaWeightMean[0]"
#print BetaWeightMean[0]
#print BetaWeightMean
#print "KWeightMean[0]"
#print KWeightMean[0]
#print KWeightMean
if not noCCMC:
print "Individual Scales"
print CCScaleStorage
print "Individual ScaleErrs"
print CCScaleErrStorage
print "average ScaleErrs"
print np.nanmean(CCScaleErrStorage)
print "AAA CC Scale means (weighted, unweighted)2"
print np.average(ma.masked_invalid(np.array(CCScaleStorage)), weights = 1.0/ma.masked_invalid(CCScaleErrStorage)**2)
print np.nanmean(ma.masked_invalid(np.array(CCScaleStorage)))
print "AAA CC Scale stds"
print np.nanstd(np.array(CCScaleStorage))
if simInd == 1:
plt.clf()
hist, bins = np.histogram(CCScaleStorage, bins = np.linspace(0.0, 5.0, 10))
plt.step((bins[1:]+bins[:-1])/2.0, hist, where = 'mid', c = 'g')
plt.savefig(dataname + 'ScaleDistro.png')
plt.clf()
print "nIter"
print nIter
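    # Scan the rate prior along k (holding Beta at its best-fit mean) and along
    # Beta (holding k at its best-fit mean) to visualize the prior around the fit.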
if not (priorRate is None):
kPriorPlots = np.linspace(0.8, 1.5, 300)
kPriors = []
for ktemp in kPriorPlots:
kPriors.append(ratePrior(ktemp*trueMCK, BetaMean[0]*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll))
betaPriorPlots = np.linspace(-0.5, 0.5, 300)
betaPriors = []
for btemp in betaPriorPlots:
            betaPriors.append(ratePrior(kmean[0]*trueMCK, btemp*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll))
actualPrior = ratePrior(kmean[0]*trueMCK, BetaMean[0]*trueMCBeta, priorRate, priorZEff, priorRateErrUp = ratePriorErrUp, priorRateErrDown = ratePriorErrDown, priorRateErrAll = ratePriorErrAll)
kPriors = np.array(kPriors)
betaPriors = np.array(betaPriors)
plt.clf()
plt.figure()
plt.plot(kPriorPlots, np.log10(kPriors) )
plt.hlines(np.log10(actualPrior), kPriorPlots[0], kPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(kmean[0], np.log10(kPriors).min(), np.log10(kPriors).max(), label = 'Best Fit K = {0:.03f}'.format(kmean[0]))
plt.xlabel('k')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_LogKPriorPlot.png')
plt.clf()
plt.figure()
plt.plot(kPriorPlots, kPriors)
plt.hlines(actualPrior, kPriorPlots[0], kPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(kmean[0], kPriors.min(), kPriors.max(), label = 'Best Fit K = {0:.03f}'.format(kmean[0]))
plt.xlabel('k')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_KPriorPlot.png')
plt.clf()
plt.figure()
plt.plot(betaPriorPlots, betaPriors)
plt.hlines(actualPrior, betaPriorPlots[0], betaPriorPlots[-1], label = 'Best Fit Prior = {0:.03f}'.format(actualPrior))
plt.vlines(BetaMean[0], betaPriors.min(), betaPriors.max(), label = 'Best Fit Beta = {0:.03f}'.format(BetaMean[0]))
plt.xlabel('beta')
plt.ylabel('ratePrior')
plt.legend()
plt.savefig(dataname + '_BetaPriorPlot.png')
'''
argList = ''
minObjList = ''
chi2Initargs = ''
for i in xrange(zCenters.shape[0]):
argList += 'f{0},'.format(i)
minObjList += 'f{0} = 1.0, error_f{0} = 0.1, limit_f{0} = (0.0, None),'.format(i)
chi2Initargs += '1.0,'
argList = argList[:-1]
minObjList = minObjList[:-1]
chi2Initargs = chi2Initargs[:-1]
#print argList
#print minObjList
#print chi2Initargs
exec('''
'''
def chi2func(nData, nSim, effmat, fnorm, zCenters, {0}, dump = False, complexdump = False):
Chi2Temp = 0.0
f_Js = [{0}]
chi2Mat = np.zeros((self.nbins))
adjNMC = np.zeros((self.nbins))
#print f_Js
#Check if I am scaling errors down with increasing MC size. Make MC twice as large as "Data" to test.
for row, nDataI, i in zip(effmat, nData, xrange(self.nbins)):
#if dump:
# print "nDataI"
# print nDataI
JSumTemp = 0.0
for eff, nSimJ, f_J, j in zip(row, nSim, f_Js, xrange(self.nbins)):
JSumTemp += nSimJ*f_J*eff*fnorm
if dump and i == j:
print "nDataI"
print nDataI
print "Bin Contribution to scaled nSim"
print nSimJ*f_J*eff*fnorm
#print "Product of nSimJ, f_J, eff, fnorm"
#print nSimJ
#print f_J
#print eff
#print fnorm
if nDataI > 1E-11 or JSumTemp > 1E-11:
if dump and i == j:
print "nDataI"
print nDataI
print "scaled nSim"
print JSumTemp
print "fnorm"
print fnorm
print "error"
print nDataI + JSumTemp*fnorm
if (nDataI + JSumTemp*fnorm) <= 0:
print (nDataI + JSumTemp*fnorm)
assert(0)
Chi2Temp += ((nDataI - JSumTemp)**2/(nDataI + JSumTemp*fnorm))#*fnorm**2
return Chi2Temp
''''''.format(argList), locals())
fnorm = float(np.sum(nData))/float(self.simcat.Catalog['zPHOT'].shape[0])
#print type(chi2func)
#print 'lamChi2 = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0})'.format(argList)
exec('lamChi2 = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0})'.format(argList),locals())
exec('lamChi2Dump = lambda {0}: chi2func(nData, nSim, self.effmat, fnorm, zCenters, {0}, dump = True)'.format(argList),locals())
#print type(lamChi2)
#print type(lamChi2Dump)
#print 'MinObj = M(lamChi2, {0})'.format(minObjList)
exec('MinObj = M(lamChi2, {0})'.format(minObjList),locals())
exec('chi2Init = lamChi2Dump({0})'.format(chi2Initargs),locals())
#print "Chi2 init = {0}".format(round(chi2Init, 4))
MinObj.set_strategy(2)
MinObj.migrad()
#MinObj.minos()
zCenters = (simBins[1:] + simBins[:-1])/2.0
print MinObj.values
fJs = []
fJErrs = []
for v in MinObj.values.keys():
fJs.append(MinObj.values[v])
fJErrs.append(MinObj.errors[v])
exec('lamChi22 = lambda k, Beta: self.chi2V2(fJs, fJErrs, zCenters, k, Beta)',locals())
exec('MinObj2 = M(lamChi22, k = 1.0, error_k = 0.1, limit_k = (0.0, None), Beta = 0.0, error_Beta = 0.1)',locals())
#print "Large Perfect Sim {0}".format(simInd)
#print "Sim R0 = 1.7E-5; Sim Beta = 4.2"
##print "Sim Beta = 1.5; Data Beta = 1.5"
##RateTest = Rate_Fitter('DES_FULLSURVEY_TEST/JLDESFULLSURVEYIaOnly+zPHOT+smearC11/FITOPT000+SALT2mu.FITRES', 'JLDESFULLSURVEYIaOnly+zPHOT+smearC11','JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow/FITOPT000+SALT2mu.FITRES', 'JLDES_R0_7E-5_Beta_1-5_Shallow','/project/rkessler/SN/SNDATA_ROOT/SIM/JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow.DUMP')
#print '/project/rkessler/jlasker/Rate_Analysis/TestSameK2Beta/outFit_datasize/JLDES_R0_1-7E-5_Beta_4-2_Datasize_Perfect-00{0:02d}/FITOPT000.FITRES'.format(simInd)
#RateTest = Rate_Fitter('/project/rkessler/jlasker/Rate_Analysis/TestSameK2Beta/outFit_datasize/JLDES_R0_1-7E-5_Beta_4-2_Datasize_Perfect-00{0:02d}/FITOPT000.FITRES'.format(simInd), 'TestSameK2Beta/JLDES_R0_1-7E-5_Beta_4-2-00{0:02d}'.format(simInd),'/project/rkessler/jlasker/Rate_Analysis/outFit_datalike/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT/FITOPT000.FITRES', 'JLDES_R0_1-7E-5_Beta_2-1_DataLikePhotZ','/scratch/midway2/rkessler/SNDATA_ROOT/SIM/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT/JLDES_R0_1-7E-5_Beta_2-1_Datalike_PERFECT.DUMP', 2.1, zmin = 0.1, zmax = 1.3)# , MJDMin = 0, MJDMax = np.inf)
#RateTest.effCalc(nbins = 12)
##RateTest.effCalc(nbins = 20)
#RateTest.fit_rate()
#ksPerf.append(RateTest.k)
#kErrsPerf.append(RateTest.kErr)
#BetasPerf.append(RateTest.Beta)
#BetaErrsPerf.append(RateTest.BetaErr)
#print "Sim Beta = 1.5; Data Beta = 1.5"
#RateTest = Rate_Fitter('DES_FULLSURVEY_TEST/JLDESFULLSURVEYIaOnly+zPHOT+smearC11/FITOPT000+SALT2mu.FITRES', 'JLDESFULLSURVEYIaOnly+zPHOT+smearC11','JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow/FITOPT000+SALT2mu.FITRES', 'JLDES_R0_7E-5_Beta_1-5_Shallow','/project/rkessler/SN/SNDATA_ROOT/SIM/JLDES_R0_7E-5_Beta_1-5_Shallow/JLDES_R0_7E-5_Beta_1-5_Shallow.DUMP')
try:
optfname = argv[1]
opts = open(optfname, 'r')
optlist = opts.readlines()
zmin = None; zmax = None; MJDMin = None; MJDMax = None; bins = None; runFit = None
for opt in optlist:
try:
optName, optVal = opt.split()
except:
print "{0} not formatted correctly".format(opt)
continue
if (optName.lower() == 'zmin') & (not zmin): zmin = optVal
if (optName.lower() == 'zmax') & (not zmax): zmax = optVal
if (optName.lower() == 'mjdmin') & (not MJDMin): MJDMin = optVal
if (optName.lower() == 'mjdmax') & (not MJDMax): MJDMax = optVal
        if (optName.lower() == 'bins') & (not bins): bins = optVal
        if (optName.lower() == 'runfit') & (runFit == None): runFit = optVal
if zmin == None: zmin = 0.1
if zmax == None: zmax = 1.2
if MJDMin == None: MJDMin = 0.0
if MJDMax == None: MJDMax = np.inf
if bins == None: bins = "equalSize"
if runFit == None: runFit = True
except:
print "Option File not working/Nonexistent. Using default values"
'''
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
from collections import namedtuple
from functools import partial
import inspect
from itertools import product
import math
import os
import numpy as np
from numpy.testing import assert_allclose, assert_array_equal
import pytest
import scipy
from scipy.sparse import csr_matrix
import scipy.stats as osp
import jax
from jax import grad, lax, vmap
import jax.numpy as jnp
import jax.random as random
from jax.scipy.special import expit, logsumexp
from jax.scipy.stats import norm as jax_norm, truncnorm as jax_truncnorm
import numpyro.distributions as dist
from numpyro.distributions import (
SineBivariateVonMises,
constraints,
kl_divergence,
transforms,
)
from numpyro.distributions.batch_util import vmap_over
from numpyro.distributions.discrete import _to_probs_bernoulli, _to_probs_multinom
from numpyro.distributions.flows import InverseAutoregressiveTransform
from numpyro.distributions.gof import InvalidTest, auto_goodness_of_fit
from numpyro.distributions.transforms import (
LowerCholeskyAffine,
PermuteTransform,
PowerTransform,
SimplexToOrderedTransform,
SoftplusTransform,
biject_to,
)
from numpyro.distributions.util import (
matrix_to_tril_vec,
multinomial,
signed_stick_breaking_tril,
sum_rightmost,
vec_to_tril_matrix,
)
from numpyro.nn import AutoregressiveNN
TEST_FAILURE_RATE = 2e-5 # For all goodness-of-fit tests.
def my_kron(A, B):
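    # Batched Kronecker product: combine the last two axes of A and B blockwise.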
D = A[..., :, None, :, None] * B[..., None, :, None, :]
ds = D.shape
newshape = (*ds[:-4], ds[-4] * ds[-3], ds[-2] * ds[-1])
return D.reshape(newshape)
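# Added sketch (not part of the original suite): for plain 2-D inputs my_kron
# should agree with np.kron.
def test_my_kron_matches_np_kron():
    A = np.arange(4.0).reshape(2, 2)
    B = np.arange(6.0).reshape(2, 3)
    assert_allclose(my_kron(A, B), np.kron(A, B))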
def _identity(x):
return x
def _circ_mean(angles):
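    # Circular mean: arctan2 of the mean sine and mean cosine over axis 0.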
return jnp.arctan2(
jnp.mean(jnp.sin(angles), axis=0), jnp.mean(jnp.cos(angles), axis=0)
)
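# Added sketch (not part of the original suite): the circular mean of angles
# straddling +/-pi should land near pi instead of averaging toward zero.
def test_circ_mean_wraps_around_pi():
    angles = jnp.array([math.pi - 0.1, -math.pi + 0.1])
    assert_allclose(jnp.abs(_circ_mean(angles)), math.pi, atol=0.1)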
def sde_fn1(x, _):
lam = 0.1
sigma2 = 0.1
return lam * x, sigma2
def sde_fn2(xy, _):
tau, a = 2.0, 1.1
x, y = xy[0], xy[1]
dx = tau * (x - x**3.0 / 3.0 + y)
dy = (1.0 / tau) * (a - x)
dxy = jnp.vstack([dx, dy]).reshape(xy.shape)
sigma2 = 0.1
return dxy, sigma2
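# Each test case bundles a distribution class, its scipy counterpart (None when
# no counterpart is registered), and the constructor parameters.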
class T(namedtuple("TestCase", ["jax_dist", "sp_dist", "params"])):
def __new__(cls, jax_dist, *params):
sp_dist = get_sp_dist(jax_dist)
return super(cls, T).__new__(cls, jax_dist, sp_dist, params)
def _mvn_to_scipy(loc, cov, prec, tril):
jax_dist = dist.MultivariateNormal(loc, cov, prec, tril)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_normal(mean=mean, cov=cov)
def _multivariate_t_to_scipy(df, loc, tril):
    if tuple(int(p) for p in scipy.__version__.split(".")[:2]) < (1, 6):
pytest.skip(
"Multivariate Student-T distribution is not available in scipy < 1.6"
)
jax_dist = dist.MultivariateStudentT(df, loc, tril)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_t(loc=mean, shape=cov, df=df)
def _lowrank_mvn_to_scipy(loc, cov_fac, cov_diag):
jax_dist = dist.LowRankMultivariateNormal(loc, cov_fac, cov_diag)
mean = jax_dist.mean
cov = jax_dist.covariance_matrix
return osp.multivariate_normal(mean=mean, cov=cov)
def _truncnorm_to_scipy(loc, scale, low, high):
if low is None:
a = -np.inf
else:
a = (low - loc) / scale
if high is None:
b = np.inf
else:
b = (high - loc) / scale
return osp.truncnorm(a, b, loc=loc, scale=scale)
def _TruncatedNormal(loc, scale, low, high):
return dist.TruncatedNormal(loc=loc, scale=scale, low=low, high=high)
def _TruncatedCauchy(loc, scale, low, high):
return dist.TruncatedCauchy(loc=loc, scale=scale, low=low, high=high)
_TruncatedNormal.arg_constraints = {}
_TruncatedNormal.reparametrized_params = []
_TruncatedNormal.infer_shapes = lambda *args: (lax.broadcast_shapes(*args), ())
class SineSkewedUniform(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
lower, upper = (np.array([-math.pi, -math.pi]), np.array([math.pi, math.pi]))
base_dist = dist.Uniform(lower, upper, **kwargs).to_event(lower.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_uniform(self: SineSkewedUniform, skewness=None):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class SineSkewedVonMises(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
von_loc, von_conc = (np.array([0.0]), np.array([1.0]))
base_dist = dist.VonMises(von_loc, von_conc, **kwargs).to_event(von_loc.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_von_mises(self: SineSkewedVonMises, skewness=None):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class SineSkewedVonMisesBatched(dist.SineSkewed):
def __init__(self, skewness, **kwargs):
von_loc, von_conc = (np.array([0.0, -1.234]), np.array([1.0, 10.0]))
base_dist = dist.VonMises(von_loc, von_conc, **kwargs).to_event(von_loc.ndim)
super().__init__(base_dist, skewness, **kwargs)
@vmap_over.register
def _vmap_over_sine_skewed_von_mises_batched(
self: SineSkewedVonMisesBatched, skewness=None
):
return vmap_over.dispatch(dist.SineSkewed)(self, base_dist=None, skewness=skewness)
class _GaussianMixture(dist.MixtureSameFamily):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, loc, scale):
component_dist = dist.Normal(loc=loc, scale=scale)
mixing_distribution = dist.Categorical(probs=mixing_probs)
super().__init__(
mixing_distribution=mixing_distribution,
component_distribution=component_dist,
)
@property
def loc(self):
return self.component_distribution.loc
@property
def scale(self):
return self.component_distribution.scale
@vmap_over.register
def _vmap_over_gaussian_mixture(self: _GaussianMixture, loc=None, scale=None):
component_distribution = vmap_over(
self.component_distribution, loc=loc, scale=scale
)
return vmap_over.dispatch(dist.MixtureSameFamily)(
self, _component_distribution=component_distribution
)
class _Gaussian2DMixture(dist.MixtureSameFamily):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, loc, covariance_matrix):
component_dist = dist.MultivariateNormal(
loc=loc, covariance_matrix=covariance_matrix
)
mixing_distribution = dist.Categorical(probs=mixing_probs)
super().__init__(
mixing_distribution=mixing_distribution,
component_distribution=component_dist,
)
@property
def loc(self):
return self.component_distribution.loc
@property
def covariance_matrix(self):
return self.component_distribution.covariance_matrix
@vmap_over.register
def _vmap_over_gaussian_2d_mixture(self: _Gaussian2DMixture, loc=None):
component_distribution = vmap_over(self.component_distribution, loc=loc)
return vmap_over.dispatch(dist.MixtureSameFamily)(
self, _component_distribution=component_distribution
)
class _GeneralMixture(dist.MixtureGeneral):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, locs, scales):
component_dists = [
dist.Normal(loc=loc_, scale=scale_) for loc_, scale_ in zip(locs, scales)
]
mixing_distribution = dist.Categorical(probs=mixing_probs)
return super().__init__(
mixing_distribution=mixing_distribution,
component_distributions=component_dists,
)
@property
def locs(self):
# hotfix for vmapping tests, which cannot easily check non-array attributes
return self.component_distributions[0].loc
@property
def scales(self):
return self.component_distributions[0].scale
@vmap_over.register
def _vmap_over_general_mixture(self: _GeneralMixture, locs=None, scales=None):
component_distributions = [
vmap_over(d, loc=locs, scale=scales) for d in self.component_distributions
]
return vmap_over.dispatch(dist.MixtureGeneral)(
self, _component_distributions=component_distributions
)
class _General2DMixture(dist.MixtureGeneral):
arg_constraints = {}
reparametrized_params = []
def __init__(self, mixing_probs, locs, covariance_matrices):
component_dists = [
dist.MultivariateNormal(loc=loc_, covariance_matrix=covariance_matrix)
for loc_, covariance_matrix in zip(locs, covariance_matrices)
]
mixing_distribution = dist.Categorical(probs=mixing_probs)
return super().__init__(
mixing_distribution=mixing_distribution,
component_distributions=component_dists,
)
@property
def locs(self):
# hotfix for vmapping tests, which cannot easily check non-array attributes
return self.component_distributions[0].loc
@property
def covariance_matrices(self):
return self.component_distributions[0].covariance_matrix
@vmap_over.register
def _vmap_over_general_2d_mixture(self: _General2DMixture, locs=None):
component_distributions = [
vmap_over(d, loc=locs) for d in self.component_distributions
]
return vmap_over.dispatch(dist.MixtureGeneral)(
self, _component_distributions=component_distributions
)
class _ImproperWrapper(dist.ImproperUniform):
def sample(self, key, sample_shape=()):
transform = biject_to(self.support)
prototype_value = jnp.zeros(self.event_shape)
unconstrained_event_shape = jnp.shape(transform.inv(prototype_value))
shape = sample_shape + self.batch_shape + unconstrained_event_shape
unconstrained_samples = random.uniform(key, shape, minval=-2, maxval=2)
return transform(unconstrained_samples)
class ZeroInflatedPoissonLogits(dist.discrete.ZeroInflatedLogits):
arg_constraints = {"rate": constraints.positive, "gate_logits": constraints.real}
pytree_data_fields = ("rate",)
def __init__(self, rate, gate_logits, *, validate_args=None):
self.rate = rate
super().__init__(dist.Poisson(rate), gate_logits, validate_args=validate_args)
@vmap_over.register
def _vmap_over_zero_inflated_poisson_logits(
self: ZeroInflatedPoissonLogits, rate=None, gate_logits=None
):
dist_axes = vmap_over.dispatch(dist.discrete.ZeroInflatedLogits)(
self,
base_dist=vmap_over(self.base_dist, rate=rate),
gate_logits=gate_logits,
gate=gate_logits,
)
dist_axes.rate = rate
return dist_axes
class SparsePoisson(dist.Poisson):
def __init__(self, rate, *, validate_args=None):
super().__init__(rate, is_sparse=True, validate_args=validate_args)
class FoldedNormal(dist.FoldedDistribution):
arg_constraints = {"loc": constraints.real, "scale": constraints.positive}
def __init__(self, loc, scale, validate_args=None):
self.loc = loc
self.scale = scale
super().__init__(dist.Normal(loc, scale), validate_args=validate_args)
@vmap_over.register
def _vmap_over_folded_normal(self: "FoldedNormal", loc=None, scale=None):
d = vmap_over.dispatch(dist.FoldedDistribution)(
self, base_dist=vmap_over(self.base_dist, loc=loc, scale=scale)
)
d.loc = loc
d.scale = scale
return d
class _SparseCAR(dist.CAR):
reparametrized_params = ["loc", "correlation", "conditional_precision"]
def __init__(
self,
loc,
correlation,
conditional_precision,
adj_matrix,
*,
is_sparse=True,
validate_args=None,
):
super().__init__(
loc,
correlation,
conditional_precision,
adj_matrix,
is_sparse=True,
validate_args=validate_args,
)
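# Map each numpyro distribution class to a constructor for the matching frozen
# scipy.stats distribution, used to cross-check shapes, log-probs, and cdfs.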
_DIST_MAP = {
dist.AsymmetricLaplace: lambda loc, scale, asymmetry: osp.laplace_asymmetric(
asymmetry, loc=loc, scale=scale
),
dist.BernoulliProbs: lambda probs: osp.bernoulli(p=probs),
dist.BernoulliLogits: lambda logits: osp.bernoulli(p=_to_probs_bernoulli(logits)),
dist.Beta: lambda con1, con0: osp.beta(con1, con0),
dist.BetaProportion: lambda mu, kappa: osp.beta(mu * kappa, (1 - mu) * kappa),
dist.BinomialProbs: lambda probs, total_count: osp.binom(n=total_count, p=probs),
dist.BinomialLogits: lambda logits, total_count: osp.binom(
n=total_count, p=_to_probs_bernoulli(logits)
),
dist.Cauchy: lambda loc, scale: osp.cauchy(loc=loc, scale=scale),
dist.Chi2: lambda df: osp.chi2(df),
dist.Dirichlet: lambda conc: osp.dirichlet(conc),
dist.Exponential: lambda rate: osp.expon(scale=jnp.reciprocal(rate)),
dist.Gamma: lambda conc, rate: osp.gamma(conc, scale=1.0 / rate),
dist.GeometricProbs: lambda probs: osp.geom(p=probs, loc=-1),
dist.GeometricLogits: lambda logits: osp.geom(
p=_to_probs_bernoulli(logits), loc=-1
),
dist.Gumbel: lambda loc, scale: osp.gumbel_r(loc=loc, scale=scale),
dist.HalfCauchy: lambda scale: osp.halfcauchy(scale=scale),
dist.HalfNormal: lambda scale: osp.halfnorm(scale=scale),
dist.InverseGamma: lambda conc, rate: osp.invgamma(conc, scale=rate),
dist.Laplace: lambda loc, scale: osp.laplace(loc=loc, scale=scale),
dist.LogNormal: lambda loc, scale: osp.lognorm(s=scale, scale=jnp.exp(loc)),
dist.LogUniform: lambda a, b: osp.loguniform(a, b),
dist.MultinomialProbs: lambda probs, total_count: osp.multinomial(
n=total_count, p=probs
),
dist.MultinomialLogits: lambda logits, total_count: osp.multinomial(
n=total_count, p=_to_probs_multinom(logits)
),
dist.MultivariateNormal: _mvn_to_scipy,
dist.MultivariateStudentT: _multivariate_t_to_scipy,
dist.LowRankMultivariateNormal: _lowrank_mvn_to_scipy,
dist.Normal: lambda loc, scale: osp.norm(loc=loc, scale=scale),
dist.Pareto: lambda scale, alpha: osp.pareto(alpha, scale=scale),
dist.Poisson: lambda rate: osp.poisson(rate),
dist.StudentT: lambda df, loc, scale: osp.t(df=df, loc=loc, scale=scale),
dist.Uniform: lambda a, b: osp.uniform(a, b - a),
dist.Logistic: lambda loc, scale: osp.logistic(loc=loc, scale=scale),
dist.VonMises: lambda loc, conc: osp.vonmises(
loc=np.array(loc, dtype=np.float64), kappa=np.array(conc, dtype=np.float64)
),
dist.Weibull: lambda scale, conc: osp.weibull_min(
c=conc,
scale=scale,
),
_TruncatedNormal: _truncnorm_to_scipy,
}
def get_sp_dist(jax_dist):
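    # Walk the MRO so subclasses resolve to their parent's scipy counterpart;
    # returns None when no match is registered.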
classes = jax_dist.mro() if isinstance(jax_dist, type) else [jax_dist]
for cls in classes:
if cls in _DIST_MAP:
return _DIST_MAP[cls]
CONTINUOUS = [
T(dist.AsymmetricLaplace, 1.0, 0.5, 1.0),
T(dist.AsymmetricLaplace, np.array([1.0, 2.0]), 2.0, 2.0),
T(dist.AsymmetricLaplace, np.array([[1.0], [2.0]]), 2.0, np.array([3.0, 5.0])),
T(dist.AsymmetricLaplaceQuantile, 0.0, 1.0, 0.5),
T(dist.AsymmetricLaplaceQuantile, np.array([1.0, 2.0]), 2.0, 0.7),
T(
dist.AsymmetricLaplaceQuantile,
np.array([[1.0], [2.0]]),
2.0,
np.array([0.2, 0.8]),
),
T(dist.Beta, 0.2, 1.1),
T(dist.Beta, 1.0, np.array([2.0, 2.0])),
T(dist.Beta, 1.0, np.array([[1.0, 1.0], [2.0, 2.0]])),
T(dist.BetaProportion, 0.2, 10.0),
T(dist.BetaProportion, 0.51, np.array([2.0, 1.0])),
T(dist.BetaProportion, 0.5, np.array([[4.0, 4.0], [2.0, 2.0]])),
T(dist.Chi2, 2.0),
T(dist.Chi2, np.array([0.3, 1.3])),
T(dist.Cauchy, 0.0, 1.0),
T(dist.Cauchy, 0.0, np.array([1.0, 2.0])),
T(dist.Cauchy, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.Dirichlet, np.array([1.7])),
T(dist.Dirichlet, np.array([0.2, 1.1])),
T(dist.Dirichlet, np.array([[0.2, 1.1], [2.0, 2.0]])),
T(
dist.EulerMaruyama,
np.array([0.0, 0.1, 0.2]),
sde_fn1,
dist.Normal(0.1, 1.0),
),
T(
dist.EulerMaruyama,
np.array([0.0, 0.1, 0.2]),
sde_fn2,
dist.Normal(jnp.array([0.0, 1.0]), 1e-3).to_event(1),
),
T(
dist.EulerMaruyama,
np.array([[0.0, 0.1, 0.2], [10.0, 10.1, 10.2]]),
sde_fn2,
dist.Normal(jnp.array([0.0, 1.0]), 1e-3).to_event(1),
),
T(
dist.EulerMaruyama,
np.array([[0.0, 0.1, 0.2], [10.0, 10.1, 10.2]]),
sde_fn2,
dist.Normal(jnp.array([[0.0, 1.0], [2.0, 3.0]]), 1e-2).to_event(1),
),
T(dist.Exponential, 2.0),
T(dist.Exponential, np.array([4.0, 2.0])),
T(dist.Gamma, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Gamma, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.GaussianRandomWalk, 0.1, 10),
T(dist.GaussianRandomWalk, np.array([0.1, 0.3, 0.25]), 10),
T(
dist.GaussianCopulaBeta,
np.array([7.0, 2.0]),
np.array([4.0, 10.0]),
np.array([[1.0, 0.75], [0.75, 1.0]]),
),
T(dist.GaussianCopulaBeta, 2.0, 1.5, np.eye(3)),
T(dist.GaussianCopulaBeta, 2.0, 1.5, np.full((5, 3, 3), np.eye(3))),
T(dist.Gompertz, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Gompertz, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.Gumbel, 0.0, 1.0),
T(dist.Gumbel, 0.5, 2.0),
T(dist.Gumbel, np.array([0.0, 0.5]), np.array([1.0, 2.0])),
T(FoldedNormal, 2.0, 4.0),
T(FoldedNormal, np.array([2.0, 50.0]), np.array([4.0, 100.0])),
T(dist.HalfCauchy, 1.0),
T(dist.HalfCauchy, np.array([1.0, 2.0])),
T(dist.HalfNormal, 1.0),
T(dist.HalfNormal, np.array([1.0, 2.0])),
T(_ImproperWrapper, constraints.positive, (), (3,)),
T(dist.InverseGamma, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.InverseGamma, np.array([0.5, 1.3]), np.array([[1.0], [3.0]])),
T(dist.Kumaraswamy, 10.0, np.array([2.0, 3.0])),
T(dist.Kumaraswamy, np.array([1.7]), np.array([[2.0], [3.0]])),
T(dist.Kumaraswamy, 0.6, 0.5),
T(dist.Laplace, 0.0, 1.0),
T(dist.Laplace, 0.5, np.array([1.0, 2.5])),
T(dist.Laplace, np.array([1.0, -0.5]), np.array([2.3, 3.0])),
T(dist.LKJ, 2, 0.5, "onion"),
T(dist.LKJ, 5, np.array([0.5, 1.0, 2.0]), "cvine"),
T(dist.LKJCholesky, 2, 0.5, "onion"),
T(dist.LKJCholesky, 2, 0.5, "cvine"),
T(dist.LKJCholesky, 5, np.array([0.5, 1.0, 2.0]), "onion"),
pytest.param(
*T(dist.LKJCholesky, 5, np.array([0.5, 1.0, 2.0]), "cvine"),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
pytest.param(
*T(dist.LKJCholesky, 3, np.array([[3.0, 0.6], [0.2, 5.0]]), "onion"),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(dist.LKJCholesky, 3, np.array([[3.0, 0.6], [0.2, 5.0]]), "cvine"),
T(dist.Logistic, 0.0, 1.0),
T(dist.Logistic, 1.0, np.array([1.0, 2.0])),
T(dist.Logistic, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.LogNormal, 1.0, 0.2),
T(dist.LogNormal, -1.0, np.array([0.5, 1.3])),
T(dist.LogNormal, np.array([0.5, -0.7]), np.array([[0.1, 0.4], [0.5, 0.1]])),
T(dist.LogUniform, 1.0, 2.0),
T(dist.LogUniform, 1.0, np.array([2.0, 3.0])),
T(dist.LogUniform, np.array([1.0, 2.0]), np.array([[3.0], [4.0]])),
T(
dist.MatrixNormal,
1.0 * np.arange(6).reshape(3, 2),
np.array([[1.0, 0, 0], [0.3, 0.36, 0], [0.4, 0.49, 4]]),
np.array([[1.0, 0], [0.4, 1]]),
),
T(
dist.MatrixNormal,
1.0 * np.arange(12).reshape((2, 3, 2)),
np.array([[1.0, 0, 0], [0.3, 0.36, 0], [0.4, 0.49, 4]]) * np.ones((2, 3, 3)),
np.array([[1.0, 0], [0.4, 0.5]]) * np.ones((2, 2, 2)),
),
T(
dist.MatrixNormal,
1.0 * np.arange(36).reshape((2, 3, 3, 2)),
np.identity(3),
np.identity(2),
),
T(dist.MultivariateNormal, 0.0, np.array([[1.0, 0.5], [0.5, 1.0]]), None, None),
T(
dist.MultivariateNormal,
np.array([1.0, 3.0]),
None,
np.array([[1.0, 0.5], [0.5, 1.0]]),
None,
),
T(
dist.MultivariateNormal,
np.array([1.0, 3.0]),
None,
np.array([[[1.0, 0.5], [0.5, 1.0]]]),
None,
),
T(
dist.MultivariateNormal,
np.array([2.0]),
None,
None,
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateNormal,
np.arange(6, dtype=np.float32).reshape((3, 2)),
None,
None,
np.array([[1.0, 0.0], [0.0, 1.0]]),
),
T(
dist.MultivariateNormal,
0.0,
None,
np.broadcast_to(np.identity(3), (2, 3, 3)),
None,
),
T(
dist.CAR,
1.2,
np.array([-0.2, 0.3]),
0.1,
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
dist.CAR,
np.array([0.0, 1.0, 3.0, 4.0]),
0.1,
np.array([0.3, 0.7]),
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
_SparseCAR,
np.array([[0.0, 1.0, 3.0, 4.0], [2.0, -1.0, -3.0, 2.0]]),
0.0,
0.1,
np.array(
[
[0.0, 1.0, 1.0, 0.0],
[1.0, 0.0, 0.0, 1.0],
[1.0, 0.0, 0.0, 1.0],
[0.0, 1.0, 1.0, 0.0],
]
),
),
T(
dist.MultivariateStudentT,
15.0,
0.0,
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([1.0, 3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([1.0, 3.0]),
np.array([[[1.0, 0.0], [0.5, 1.0]]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.array([3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.arange(6, dtype=np.float32).reshape((3, 2)),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
15.0,
np.ones(3),
np.broadcast_to(np.identity(3), (2, 3, 3)),
),
T(
dist.MultivariateStudentT,
np.array(7.0),
np.array([1.0, 3.0]),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.MultivariateStudentT,
np.arange(20, 22, dtype=jnp.float32),
np.ones(3),
np.broadcast_to(jnp.identity(3), (2, 3, 3)),
),
T(
dist.MultivariateStudentT,
np.arange(20, 26, dtype=jnp.float32).reshape((3, 2)),
np.ones(2),
np.array([[1.0, 0.0], [0.5, 1.0]]),
),
T(
dist.LowRankMultivariateNormal,
np.zeros(2),
np.array([[1.0], [0.0]]),
np.array([1.0, 1.0]),
),
T(
dist.LowRankMultivariateNormal,
np.arange(6, dtype=jnp.float32).reshape((2, 3)),
np.arange(6, dtype=jnp.float32).reshape((3, 2)),
np.array([1.0, 2.0, 3.0]),
),
T(dist.Normal, 0.0, 1.0),
T(dist.Normal, 1.0, np.array([1.0, 2.0])),
T(dist.Normal, np.array([0.0, 1.0]), np.array([[1.0], [2.0]])),
T(dist.Pareto, 1.0, 2.0),
T(dist.Pareto, np.array([1.0, 0.5]), np.array([0.3, 2.0])),
T(dist.Pareto, np.array([[1.0], [3.0]]), np.array([1.0, 0.5])),
T(dist.RelaxedBernoulliLogits, 2.0, -10.0),
T(dist.RelaxedBernoulliLogits, np.array([1.0, 3.0]), np.array([3.0, 8.0])),
T(dist.SoftLaplace, 1.0, 1.0),
T(dist.SoftLaplace, np.array([-1.0, 50.0]), np.array([4.0, 100.0])),
T(dist.StudentT, 1.0, 1.0, 0.5),
T(dist.StudentT, 2.0, np.array([1.0, 2.0]), 2.0),
T(dist.StudentT, np.array([3.0, 5.0]), np.array([[1.0], [2.0]]), 2.0),
T(_TruncatedCauchy, 0.0, 1.0, -1.0, None),
T(_TruncatedCauchy, 0.0, np.array([1.0, 2.0]), 1.0, None),
T(
_TruncatedCauchy,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
np.array([-2.0, 2.0]),
None,
),
T(_TruncatedCauchy, 0.0, 1.0, None, 1.0),
T(_TruncatedCauchy, 0.0, 1.0, -1.0, 1.0),
T(_TruncatedNormal, 0.0, 1.0, -1.0, None),
T(_TruncatedNormal, -1.0, np.array([1.0, 2.0]), 1.0, None),
T(
_TruncatedNormal,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
np.array([-2.0, 2.0]),
None,
),
T(_TruncatedNormal, -1.0, 2.0, 1.0, 5.0),
T(_TruncatedNormal, np.array([-1.0, 4.0]), 2.0, None, 5.0),
T(_TruncatedNormal, -1.0, np.array([2.0, 3.0]), 1.0, None),
T(_TruncatedNormal, -1.0, 2.0, np.array([-6.0, 4.0]), np.array([-4.0, 6.0])),
T(
_TruncatedNormal,
np.array([0.0, 1.0]),
np.array([[1.0], [2.0]]),
None,
np.array([-2.0, 2.0]),
),
T(dist.TwoSidedTruncatedDistribution, dist.Laplace(0.0, 1.0), -2.0, 3.0),
T(dist.Uniform, 0.0, 2.0),
T(dist.Uniform, 1.0, np.array([2.0, 3.0])),
T(dist.Uniform, np.array([0.0, 0.0]), np.array([[2.0], [3.0]])),
T(dist.Weibull, 0.2, 1.1),
T(dist.Weibull, 2.8, np.array([2.0, 2.0])),
T(dist.Weibull, 1.8, np.array([[1.0, 1.0], [2.0, 2.0]])),
T(
_GaussianMixture,
np.ones(3) / 3.0,
np.array([0.0, 7.7, 2.1]),
np.array([4.2, 7.7, 2.1]),
),
T(
_Gaussian2DMixture,
np.array([0.2, 0.5, 0.3]),
np.array([[-1.2, 1.5], [2.0, 2.0], [-1, 4.0]]), # Mean
np.array(
[
[
[0.1, -0.2],
[-0.2, 1.0],
],
[
[0.75, 0.0],
[0.0, 0.75],
],
[
[1.0, 0.5],
[0.5, 0.27],
],
]
), # Covariance
),
T(
_GeneralMixture,
np.array([0.2, 0.3, 0.5]),
np.array([0.0, 7.7, 2.1]),
np.array([4.2, 1.7, 2.1]),
),
T(
_General2DMixture,
np.array([0.2, 0.5, 0.3]),
np.array([[-1.2, 1.5], [2.0, 2.0], [-1, 4.0]]), # Mean
np.array(
[
[
[0.1, -0.2],
[-0.2, 1.0],
],
[
[0.75, 0.0],
[0.0, 0.75],
],
[
[1.0, 0.5],
[0.5, 0.27],
],
]
), # Covariance
),
]
DIRECTIONAL = [
T(dist.VonMises, 2.0, 10.0),
T(dist.VonMises, 2.0, np.array([150.0, 10.0])),
T(dist.VonMises, np.array([1 / 3 * np.pi, -1.0]), np.array([20.0, 30.0])),
pytest.param(
*T(
dist.SineBivariateVonMises,
0.0,
0.0,
5.0,
6.0,
2.0,
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(
dist.SineBivariateVonMises,
3.003,
-1.343,
5.0,
6.0,
2.0,
),
pytest.param(
*T(
dist.SineBivariateVonMises,
-1.232,
-1.3430,
3.4,
2.0,
1.0,
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
pytest.param(
*T(
dist.SineBivariateVonMises,
np.array([math.pi - 0.2, 1.0]),
np.array([0.0, 1.0]),
np.array([5.0, 5.0]),
np.array([7.0, 0.5]),
None,
np.array([0.5, 0.1]),
),
marks=pytest.mark.skipif("CI" in os.environ, reason="reduce time for CI"),
),
T(dist.ProjectedNormal, np.array([0.0, 0.0])),
T(dist.ProjectedNormal, np.array([[2.0, 3.0]])),
T(dist.ProjectedNormal, np.array([0.0, 0.0, 0.0])),
T(dist.ProjectedNormal, np.array([[-1.0, 2.0, 3.0]])),
T(SineSkewedUniform, np.array([-math.pi / 4, 0.1])),
T(SineSkewedVonMises, np.array([0.342355])),
T(SineSkewedVonMisesBatched, np.array([[0.342355, -0.0001], [0.91, 0.09]])),
]
DISCRETE = [
T(dist.BetaBinomial, 2.0, 5.0, 10),
T(
dist.BetaBinomial,
np.array([2.0, 4.0]),
np.array([5.0, 3.0]),
np.array([10, 12]),
),
T(dist.BernoulliProbs, 0.2),
T(dist.BernoulliProbs, np.array([0.2, 0.7])),
T(dist.BernoulliLogits, np.array([-1.0, 3.0])),
T(dist.BinomialProbs, np.array([0.2, 0.7]), np.array([10, 2])),
T(dist.BinomialProbs, np.array([0.2, 0.7]), np.array([5, 8])),
T(dist.BinomialLogits, np.array([-1.0, 3.0]), np.array([5, 8])),
T(dist.CategoricalProbs, np.array([1.0])),
T(dist.CategoricalProbs, np.array([0.1, 0.5, 0.4])),
T(dist.CategoricalProbs, np.array([[0.1, 0.5, 0.4], [0.4, 0.4, 0.2]])),
T(dist.CategoricalLogits, np.array([-5.0])),
T(dist.CategoricalLogits, np.array([1.0, 2.0, -2.0])),
T(dist.CategoricalLogits, np.array([[-1, 2.0, 3.0], [3.0, -4.0, -2.0]])),
T(dist.Delta, 1),
T(dist.Delta, np.array([0.0, 2.0])),
T(dist.Delta, np.array([0.0, 2.0]), np.array([-2.0, -4.0])),
T(dist.DirichletMultinomial, np.array([1.0, 2.0, 3.9]), 10),
T(dist.DirichletMultinomial, np.array([0.2, 0.7, 1.1]), np.array([5, 5])),
T(dist.GammaPoisson, 2.0, 2.0),
T(dist.GammaPoisson, np.array([6.0, 2]), np.array([2.0, 8.0])),
T(dist.GeometricProbs, 0.2),
T(dist.GeometricProbs, np.array([0.2, 0.7])),
T(dist.GeometricLogits, np.array([-1.0, 3.0])),
T(dist.MultinomialProbs, np.array([0.2, 0.7, 0.1]), 10),
T(dist.MultinomialProbs, np.array([0.2, 0.7, 0.1]), np.array([5, 8])),
T(dist.MultinomialLogits, np.array([-1.0, 3.0]), np.array([[5], [8]])),
T(dist.NegativeBinomialProbs, 10, 0.2),
T(dist.NegativeBinomialProbs, 10, np.array([0.2, 0.6])),
T(dist.NegativeBinomialProbs, np.array([4.2, 10.7, 2.1]), 0.2),
T(
dist.NegativeBinomialProbs,
np.array([4.2, 10.7, 2.1]),
np.array([0.2, 0.6, 0.5]),
),
T(dist.NegativeBinomialLogits, 10, -2.1),
T(dist.NegativeBinomialLogits, 10, np.array([-5.2, 2.1])),
T(dist.NegativeBinomialLogits, np.array([4.2, 10.7, 2.1]), -5.2),
T(
dist.NegativeBinomialLogits,
np.array([4.2, 7.7, 2.1]),
np.array([4.2, 0.7, 2.1]),
),
T(dist.NegativeBinomial2, 0.3, 10),
T(dist.NegativeBinomial2, np.array([10.2, 7, 31]), 10),
T(dist.NegativeBinomial2, np.array([10.2, 7, 31]), np.array([10.2, 20.7, 2.1])),
T(dist.OrderedLogistic, -2, np.array([-10.0, 4.0, 9.0])),
T(dist.OrderedLogistic, np.array([-4, 3, 4, 5]), np.array([-1.5])),
T(dist.DiscreteUniform, -2, np.array([-1.0, 4.0, 9.0])),
T(dist.DiscreteUniform, np.array([-4, 3, 4, 5]), np.array([6])),
T(dist.Poisson, 2.0),
T(dist.Poisson, np.array([2.0, 3.0, 5.0])),
T(SparsePoisson, 2.0),
T(SparsePoisson, np.array([2.0, 3.0, 5.0])),
T(SparsePoisson, 2),
T(dist.ZeroInflatedPoisson, 0.6, 2.0),
T(dist.ZeroInflatedPoisson, np.array([0.2, 0.7, 0.3]), np.array([2.0, 3.0, 5.0])),
T(ZeroInflatedPoissonLogits, 2.0, 3.0),
T(
ZeroInflatedPoissonLogits,
np.array([0.2, 4.0, 0.3]),
np.array([2.0, -3.0, 5.0]),
),
]
def _is_batched_multivariate(jax_dist):
return len(jax_dist.event_shape) > 0 and len(jax_dist.batch_shape) > 0
def gen_values_within_bounds(constraint, size, key=random.PRNGKey(11)):
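    # Draw pseudo-random values that satisfy the given constraint, branching on
    # the constraint type.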
eps = 1e-6
if constraint is constraints.boolean:
return random.bernoulli(key, shape=size)
elif isinstance(constraint, constraints.greater_than):
return jnp.exp(random.normal(key, size)) + constraint.lower_bound + eps
elif isinstance(constraint, constraints.integer_interval):
lower_bound = jnp.broadcast_to(constraint.lower_bound, size)
upper_bound = jnp.broadcast_to(constraint.upper_bound, size)
return random.randint(key, size, lower_bound, upper_bound + 1)
elif isinstance(constraint, constraints.integer_greater_than):
return constraint.lower_bound + random.poisson(key, np.array(5), shape=size)
elif isinstance(constraint, constraints.interval):
lower_bound = jnp.broadcast_to(constraint.lower_bound, size)
upper_bound = jnp.broadcast_to(constraint.upper_bound, size)
return random.uniform(key, size, minval=lower_bound, maxval=upper_bound)
elif constraint in (constraints.real, constraints.real_vector):
return random.normal(key, size)
elif constraint is constraints.simplex:
return osp.dirichlet.rvs(alpha=jnp.ones((size[-1],)), size=size[:-1])
elif isinstance(constraint, constraints.multinomial):
n = size[-1]
return multinomial(
key, p=jnp.ones((n,)) / n, n=constraint.upper_bound, shape=size[:-1]
)
elif constraint is constraints.corr_cholesky:
return signed_stick_breaking_tril(
random.uniform(
key, size[:-2] + (size[-1] * (size[-1] - 1) // 2,), minval=-1, maxval=1
)
)
elif constraint is constraints.corr_matrix:
cholesky = signed_stick_breaking_tril(
random.uniform(
key, size[:-2] + (size[-1] * (size[-1] - 1) // 2,), minval=-1, maxval=1
)
)
return jnp.matmul(cholesky, jnp.swapaxes(cholesky, -2, -1))
elif constraint is constraints.lower_cholesky:
return jnp.tril(random.uniform(key, size))
elif constraint is constraints.positive_definite:
x = random.normal(key, size)
return jnp.matmul(x, jnp.swapaxes(x, -2, -1))
elif constraint is constraints.ordered_vector:
x = jnp.cumsum(random.exponential(key, size), -1)
return x - random.normal(key, size[:-1] + (1,))
elif isinstance(constraint, constraints.independent):
return gen_values_within_bounds(constraint.base_constraint, size, key)
elif constraint is constraints.sphere:
x = random.normal(key, size)
        return x / jnp.linalg.norm(x, axis=-1, keepdims=True)
elif constraint is constraints.l1_ball:
key1, key2 = random.split(key)
sign = random.bernoulli(key1)
bounds = [0, (-1) ** sign * 0.5]
return random.uniform(key, size, float, *sorted(bounds))
else:
raise NotImplementedError("{} not implemented.".format(constraint))
def gen_values_outside_bounds(constraint, size, key=random.PRNGKey(11)):
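    # Mirror of gen_values_within_bounds: draw values just outside the support
    # of the given constraint.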
if constraint is constraints.boolean:
return random.bernoulli(key, shape=size) - 2
elif isinstance(constraint, constraints.greater_than):
return constraint.lower_bound - jnp.exp(random.normal(key, size))
elif isinstance(constraint, constraints.integer_interval):
lower_bound = jnp.broadcast_to(constraint.lower_bound, size)
return random.randint(key, size, lower_bound - 1, lower_bound)
elif isinstance(constraint, constraints.integer_greater_than):
return constraint.lower_bound - random.poisson(key, np.array(5), shape=size)
elif isinstance(constraint, constraints.interval):
upper_bound = jnp.broadcast_to(constraint.upper_bound, size)
return random.uniform(key, size, minval=upper_bound, maxval=upper_bound + 1.0)
elif constraint in [constraints.real, constraints.real_vector]:
return lax.full(size, np.nan)
elif constraint is constraints.simplex:
return osp.dirichlet.rvs(alpha=jnp.ones((size[-1],)), size=size[:-1]) + 1e-2
elif isinstance(constraint, constraints.multinomial):
n = size[-1]
return (
multinomial(
key, p=jnp.ones((n,)) / n, n=constraint.upper_bound, shape=size[:-1]
)
+ 1
)
elif constraint is constraints.corr_cholesky:
return (
signed_stick_breaking_tril(
random.uniform(
key,
size[:-2] + (size[-1] * (size[-1] - 1) // 2,),
minval=-1,
maxval=1,
)
)
+ 1e-2
)
elif constraint is constraints.corr_matrix:
cholesky = 1e-2 + signed_stick_breaking_tril(
random.uniform(
key, size[:-2] + (size[-1] * (size[-1] - 1) // 2,), minval=-1, maxval=1
)
)
return jnp.matmul(cholesky, jnp.swapaxes(cholesky, -2, -1))
elif constraint is constraints.lower_cholesky:
return random.uniform(key, size)
elif constraint is constraints.positive_definite:
return random.normal(key, size)
elif constraint is constraints.ordered_vector:
x = jnp.cumsum(random.exponential(key, size), -1)
return x[..., ::-1]
elif isinstance(constraint, constraints.independent):
return gen_values_outside_bounds(constraint.base_constraint, size, key)
elif constraint is constraints.sphere:
x = random.normal(key, size)
x = x / jnp.linalg.norm(x, axis=-1, keepdims=True)
return 2 * x
elif constraint is constraints.l1_ball:
key1, key2 = random.split(key)
sign = random.bernoulli(key1)
bounds = [(-1) ** sign * 1.1, (-1) ** sign * 2]
return random.uniform(key, size, float, *sorted(bounds))
else:
raise NotImplementedError("{} not implemented.".format(constraint))
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
@pytest.mark.parametrize("prepend_shape", [(), (2,), (2, 3)])
def test_dist_shape(jax_dist, sp_dist, params, prepend_shape):
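    # Samples must have shape prepend_shape + batch_shape + event_shape, and the
    # scipy counterpart's rvs (when available) must agree.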
jax_dist = jax_dist(*params)
rng_key = random.PRNGKey(0)
expected_shape = prepend_shape + jax_dist.batch_shape + jax_dist.event_shape
samples = jax_dist.sample(key=rng_key, sample_shape=prepend_shape)
assert isinstance(samples, jnp.ndarray)
assert jnp.shape(samples) == expected_shape
if (
sp_dist
and not _is_batched_multivariate(jax_dist)
and not isinstance(jax_dist, dist.MultivariateStudentT)
):
sp_dist = sp_dist(*params)
sp_samples = sp_dist.rvs(size=prepend_shape + jax_dist.batch_shape)
assert jnp.shape(sp_samples) == expected_shape
elif (
sp_dist
and not _is_batched_multivariate(jax_dist)
and isinstance(jax_dist, dist.MultivariateStudentT)
):
sp_dist = sp_dist(*params)
size_ = prepend_shape + jax_dist.batch_shape
size = (1) if size_ == () else size_
try:
sp_samples = sp_dist.rvs(size=size)
except ValueError:
pytest.skip("scipy multivariate t doesn't support size with > 1 element")
assert jnp.shape(sp_samples) == expected_shape
if isinstance(jax_dist, (dist.MultivariateNormal, dist.MultivariateStudentT)):
assert jax_dist.covariance_matrix.ndim == len(jax_dist.batch_shape) + 2
assert_allclose(
jax_dist.precision_matrix,
jnp.linalg.inv(jax_dist.covariance_matrix),
rtol=1e-6,
)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_infer_shapes(jax_dist, sp_dist, params):
shapes = tuple(getattr(p, "shape", ()) for p in params)
shapes = tuple(x() if callable(x) else x for x in shapes)
jax_dist = jax_dist(*params)
try:
expected_batch_shape, expected_event_shape = type(jax_dist).infer_shapes(
*shapes
)
except NotImplementedError:
pytest.skip(f"{type(jax_dist).__name__}.infer_shapes() is not implemented")
assert jax_dist.batch_shape == expected_batch_shape
assert jax_dist.event_shape == expected_event_shape
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_has_rsample(jax_dist, sp_dist, params):
jax_dist = jax_dist(*params)
masked_dist = jax_dist.mask(False)
indept_dist = jax_dist.expand_by([2]).to_event(1)
transf_dist = dist.TransformedDistribution(jax_dist, biject_to(constraints.real))
assert masked_dist.has_rsample == jax_dist.has_rsample
assert indept_dist.has_rsample == jax_dist.has_rsample
assert transf_dist.has_rsample == jax_dist.has_rsample
if jax_dist.has_rsample:
assert isinstance(jax_dist, dist.Delta) or not jax_dist.is_discrete
if isinstance(jax_dist, dist.TransformedDistribution):
assert jax_dist.base_dist.has_rsample
else:
assert set(jax_dist.arg_constraints) == set(jax_dist.reparametrized_params)
jax_dist.rsample(random.PRNGKey(0))
if isinstance(jax_dist, dist.Normal):
masked_dist.rsample(random.PRNGKey(0))
indept_dist.rsample(random.PRNGKey(0))
transf_dist.rsample(random.PRNGKey(0))
else:
with pytest.raises(NotImplementedError):
jax_dist.rsample(random.PRNGKey(0))
if isinstance(jax_dist, dist.BernoulliProbs):
with pytest.raises(NotImplementedError):
masked_dist.rsample(random.PRNGKey(0))
with pytest.raises(NotImplementedError):
indept_dist.rsample(random.PRNGKey(0))
with pytest.raises(NotImplementedError):
transf_dist.rsample(random.PRNGKey(0))
@pytest.mark.parametrize("batch_shape", [(), (4,), (3, 2)])
def test_unit(batch_shape):
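    # dist.Unit is a zero-size "factor" distribution: samples have a trailing
    # dimension of 0 and log_prob returns the stored log_factor.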
log_factor = random.normal(random.PRNGKey(0), batch_shape)
d = dist.Unit(log_factor=log_factor)
x = d.sample(random.PRNGKey(1))
assert x.shape == batch_shape + (0,)
assert (d.log_prob(x) == log_factor).all()
@pytest.mark.parametrize("jax_dist, sp_dist, params", CONTINUOUS)
def test_sample_gradient(jax_dist, sp_dist, params):
# we have pathwise gradient for gamma sampler
gamma_derived_params = {
"Gamma": ["concentration"],
"Beta": ["concentration1", "concentration0"],
"BetaProportion": ["mean", "concentration"],
"Chi2": ["df"],
"Dirichlet": ["concentration"],
"InverseGamma": ["concentration"],
"LKJ": ["concentration"],
"LKJCholesky": ["concentration"],
"StudentT": ["df"],
}.get(jax_dist.__name__, [])
dist_args = [
p
for p in (
inspect.getfullargspec(jax_dist.__init__)[0][1:]
if inspect.isclass(jax_dist)
            # account for the case where jax_dist is a function
else inspect.getfullargspec(jax_dist)[0]
)
]
params_dict = dict(zip(dist_args[: len(params)], params))
jax_class = type(jax_dist(**params_dict))
reparametrized_params = [
p for p in jax_class.reparametrized_params if p not in gamma_derived_params
]
if not reparametrized_params:
pytest.skip("{} not reparametrized.".format(jax_class.__name__))
nonrepara_params_dict = {
k: v for k, v in params_dict.items() if k not in reparametrized_params
}
repara_params = tuple(
v for k, v in params_dict.items() if k in reparametrized_params
)
rng_key = random.PRNGKey(0)
def fn(args):
args_dict = dict(zip(reparametrized_params, args))
return jnp.sum(
jax_dist(**args_dict, **nonrepara_params_dict).sample(key=rng_key)
)
actual_grad = jax.grad(fn)(repara_params)
assert len(actual_grad) == len(repara_params)
eps = 1e-3
for i in range(len(repara_params)):
if repara_params[i] is None:
continue
args_lhs = [p if j != i else p - eps for j, p in enumerate(repara_params)]
args_rhs = [p if j != i else p + eps for j, p in enumerate(repara_params)]
fn_lhs = fn(args_lhs)
fn_rhs = fn(args_rhs)
# finite diff approximation
expected_grad = (fn_rhs - fn_lhs) / (2.0 * eps)
assert jnp.shape(actual_grad[i]) == jnp.shape(repara_params[i])
assert_allclose(jnp.sum(actual_grad[i]), expected_grad, rtol=0.02, atol=0.03)
@pytest.mark.parametrize(
"jax_dist, params",
[
(dist.Gamma, (1.0,)),
(dist.Gamma, (0.1,)),
(dist.Gamma, (10.0,)),
(dist.Chi2, (1.0,)),
(dist.Chi2, (0.1,)),
(dist.Chi2, (10.0,)),
(dist.Beta, (1.0, 1.0)),
(dist.StudentT, (5.0, 2.0, 4.0)),
],
)
def test_pathwise_gradient(jax_dist, params):
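    # Compare autodiff gradients of a Monte Carlo estimate of E[z + z**2] with
    # the exact gradient of mean + variance + mean**2.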
rng_key = random.PRNGKey(0)
N = 1000000
def f(params):
z = jax_dist(*params).sample(key=rng_key, sample_shape=(N,))
return (z + z**2).mean(0)
def g(params):
d = jax_dist(*params)
return d.mean + d.variance + d.mean**2
actual_grad = grad(f)(params)
expected_grad = grad(g)(params)
assert_allclose(actual_grad, expected_grad, rtol=0.005)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_jit_log_likelihood(jax_dist, sp_dist, params):
if jax_dist.__name__ in (
"EulerMaruyama",
"GaussianRandomWalk",
"_ImproperWrapper",
"LKJ",
"LKJCholesky",
"_SparseCAR",
):
pytest.xfail(reason="non-jittable params")
rng_key = random.PRNGKey(0)
samples = jax_dist(*params).sample(key=rng_key, sample_shape=(2, 3))
def log_likelihood(*params):
return jax_dist(*params).log_prob(samples)
expected = log_likelihood(*params)
actual = jax.jit(log_likelihood)(*params)
assert_allclose(actual, expected, atol=2e-5, rtol=2e-5)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
@pytest.mark.parametrize("prepend_shape", [(), (2,), (2, 3)])
@pytest.mark.parametrize("jit", [False, True])
def test_log_prob(jax_dist, sp_dist, params, prepend_shape, jit):
jit_fn = _identity if not jit else jax.jit
jax_dist = jax_dist(*params)
rng_key = random.PRNGKey(0)
samples = jax_dist.sample(key=rng_key, sample_shape=prepend_shape)
assert jax_dist.log_prob(samples).shape == prepend_shape + jax_dist.batch_shape
truncated_dists = (
dist.LeftTruncatedDistribution,
dist.RightTruncatedDistribution,
dist.TwoSidedTruncatedDistribution,
)
if sp_dist is None:
if isinstance(jax_dist, truncated_dists):
if isinstance(params[0], dist.Distribution):
# new api
loc, scale, low, high = (
params[0].loc,
params[0].scale,
params[1],
params[2],
)
else:
# old api
loc, scale, low, high = params
if low is None:
low = -np.inf
if high is None:
high = np.inf
sp_dist = get_sp_dist(type(jax_dist.base_dist))(loc, scale)
expected = sp_dist.logpdf(samples) - jnp.log(
sp_dist.cdf(high) - sp_dist.cdf(low)
)
assert_allclose(jit_fn(jax_dist.log_prob)(samples), expected, atol=1e-5)
return
pytest.skip("no corresponding scipy distn.")
if _is_batched_multivariate(jax_dist):
pytest.skip("batching not allowed in multivariate distns.")
if jax_dist.event_shape and prepend_shape:
# >>> d = sp.dirichlet([1.1, 1.1])
# >>> samples = d.rvs(size=(2,))
# >>> d.logpdf(samples)
# ValueError: The input vector 'x' must lie within the normal simplex ...
pytest.skip("batched samples cannot be scored by multivariate distributions.")
sp_dist = sp_dist(*params)
try:
expected = sp_dist.logpdf(samples)
except AttributeError:
expected = sp_dist.logpmf(samples)
except ValueError as e:
# precision issue: jnp.sum(x / jnp.sum(x)) = 0.99999994 != 1
if "The input vector 'x' must lie within the normal simplex." in str(e):
samples = jax.device_get(samples).astype("float64")
samples = samples / samples.sum(axis=-1, keepdims=True)
expected = sp_dist.logpdf(samples)
else:
raise e
assert_allclose(jit_fn(jax_dist.log_prob)(samples), expected, atol=1e-5)
def test_mixture_log_prob():
gmm = dist.MixtureSameFamily(
dist.Categorical(logits=np.zeros(2)), dist.Normal(0, 1).expand([2])
)
actual = gmm.log_prob(0.0)
expected = dist.Normal(0, 1).log_prob(0.0)
assert_allclose(actual, expected)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params",
# TODO: add more complete pattern for Discrete.cdf
CONTINUOUS + [T(dist.Poisson, 2.0), T(dist.Poisson, np.array([2.0, 3.0, 5.0]))],
)
@pytest.mark.filterwarnings("ignore:overflow encountered:RuntimeWarning")
def test_cdf_and_icdf(jax_dist, sp_dist, params):
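    # Consistency checks: grad(cdf) should equal the pdf, grad(icdf) its
    # reciprocal at the quantile, cdf/icdf should round-trip, and both should
    # match scipy where a counterpart exists.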
d = jax_dist(*params)
if d.event_dim > 0:
pytest.skip("skip testing cdf/icdf methods of multivariate distributions")
samples = d.sample(key=random.PRNGKey(0), sample_shape=(100,))
quantiles = random.uniform(random.PRNGKey(1), (100,) + d.shape())
try:
rtol = 2e-3 if jax_dist in (dist.Gamma, dist.StudentT) else 1e-5
if d.shape() == () and not d.is_discrete:
assert_allclose(
jax.vmap(jax.grad(d.cdf))(samples),
jnp.exp(d.log_prob(samples)),
atol=1e-5,
rtol=rtol,
)
assert_allclose(
jax.vmap(jax.grad(d.icdf))(quantiles),
jnp.exp(-d.log_prob(d.icdf(quantiles))),
atol=1e-5,
rtol=rtol,
)
assert_allclose(d.cdf(d.icdf(quantiles)), quantiles, atol=1e-5, rtol=1e-5)
assert_allclose(d.icdf(d.cdf(samples)), samples, atol=1e-5, rtol=rtol)
except NotImplementedError:
pass
# test against scipy
if not sp_dist:
pytest.skip("no corresponding scipy distn.")
sp_dist = sp_dist(*params)
try:
actual_cdf = d.cdf(samples)
expected_cdf = sp_dist.cdf(samples)
assert_allclose(actual_cdf, expected_cdf, atol=1e-5, rtol=1e-5)
actual_icdf = d.icdf(quantiles)
expected_icdf = sp_dist.ppf(quantiles)
assert_allclose(actual_icdf, expected_icdf, atol=1e-4, rtol=1e-4)
except NotImplementedError:
pass
@pytest.mark.parametrize("jax_dist, sp_dist, params", CONTINUOUS + DIRECTIONAL)
def test_gof(jax_dist, sp_dist, params):
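    # Monte Carlo goodness-of-fit: sample, score the samples, and require each
    # batch element's GOF statistic to exceed TEST_FAILURE_RATE.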
if "Improper" in jax_dist.__name__:
pytest.skip("distribution has improper .log_prob()")
if "LKJ" in jax_dist.__name__:
pytest.xfail("incorrect submanifold scaling")
    if jax_dist is dist.EulerMaruyama:
        d = jax_dist(*params)
        if d.event_dim > 1:
            pytest.skip("skip EulerMaruyama test when event shape is non-trivial.")
num_samples = 10000
if "BetaProportion" in jax_dist.__name__:
num_samples = 20000
rng_key = random.PRNGKey(0)
d = jax_dist(*params)
samples = d.sample(key=rng_key, sample_shape=(num_samples,))
probs = np.exp(d.log_prob(samples))
dim = None
if jax_dist is dist.ProjectedNormal:
dim = samples.shape[-1] - 1
# Test each batch independently.
probs = probs.reshape(num_samples, -1)
samples = samples.reshape(probs.shape + d.event_shape)
if "Dirichlet" in jax_dist.__name__:
# The Dirichlet density is over all but one of the probs.
samples = samples[..., :-1]
for b in range(probs.shape[1]):
try:
gof = auto_goodness_of_fit(samples[:, b], probs[:, b], dim=dim)
except InvalidTest:
pytest.skip("expensive test")
else:
assert gof > TEST_FAILURE_RATE
@pytest.mark.parametrize("jax_dist, sp_dist, params", CONTINUOUS + DISCRETE)
def test_independent_shape(jax_dist, sp_dist, params):
d = jax_dist(*params)
batch_shape, event_shape = d.batch_shape, d.event_shape
shape = batch_shape + event_shape
for i in range(len(batch_shape)):
indep = dist.Independent(d, reinterpreted_batch_ndims=i)
sample = indep.sample(random.PRNGKey(0))
event_boundary = len(shape) - len(event_shape) - i
assert indep.batch_shape == shape[:event_boundary]
assert indep.event_shape == shape[event_boundary:]
assert jnp.shape(indep.log_prob(sample)) == shape[:event_boundary]
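# A minimal sketch (reusing the module's `dist` import) of what the loop above
# verifies: Independent moves the rightmost reinterpreted batch dimensions into
# the event shape, so log_prob sums over them.
_base = dist.Normal(jnp.zeros((3, 4)), 1.0)  # batch_shape (3, 4)
_indep = dist.Independent(_base, reinterpreted_batch_ndims=1)
_val = jnp.zeros((3, 4))
assert _indep.batch_shape == (3,) and _indep.event_shape == (4,)
assert_allclose(_indep.log_prob(_val), _base.log_prob(_val).sum(-1))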
def _tril_cholesky_to_tril_corr(x):
w = vec_to_tril_matrix(x, diagonal=-1)
diag = jnp.sqrt(1 - jnp.sum(w**2, axis=-1))
cholesky = w + jnp.expand_dims(diag, axis=-1) * jnp.identity(w.shape[-1])
corr = jnp.matmul(cholesky, cholesky.T)
return matrix_to_tril_vec(corr, diagonal=-1)
@pytest.mark.parametrize("dimension", [2, 3, 5])
def test_log_prob_LKJCholesky_uniform(dimension):
# When concentration=1, the distribution of correlation matrices is uniform.
# We will test that fact here.
d = dist.LKJCholesky(dimension=dimension, concentration=1)
N = 5
corr_log_prob = []
for i in range(N):
sample = d.sample(random.PRNGKey(i))
log_prob = d.log_prob(sample)
sample_tril = matrix_to_tril_vec(sample, diagonal=-1)
cholesky_to_corr_jac = np.linalg.slogdet(
jax.jacobian(_tril_cholesky_to_tril_corr)(sample_tril)
)[1]
corr_log_prob.append(log_prob - cholesky_to_corr_jac)
corr_log_prob = np.array(corr_log_prob)
# test if they are constant
assert_allclose(
corr_log_prob,
jnp.broadcast_to(corr_log_prob[0], corr_log_prob.shape),
rtol=1e-6,
)
    if dimension == 2:
        # When concentration = 1, LKJ gives a uniform distribution over correlation
        # matrices, so for dimension = 2 the single off-diagonal entry is
        # Uniform(-1, 1), whose density is 0.5. In addition, the jacobian of the
        # transformation cholesky -> corr is 1 (hence its log value is 0) because
        # the off-diagonal lower triangular element does not change in the transform.
        # So target_log_prob = log(0.5).
        assert_allclose(corr_log_prob[0], jnp.log(0.5), rtol=1e-6)
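# A small companion check (hedged; reuses the module's `dist` import): the same
# uniformity fact, phrased through the correlation-matrix distribution. For
# dimension 2 with concentration = 1, any valid correlation matrix should have
# log density log(0.5).
_r = 0.3
_corr = jnp.array([[1.0, _r], [_r, 1.0]])
assert_allclose(dist.LKJ(2, concentration=1.0).log_prob(_corr), jnp.log(0.5), rtol=1e-5)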
@pytest.mark.parametrize("dimension", [2, 3, 5])
@pytest.mark.parametrize("concentration", [0.6, 2.2])
def test_log_prob_LKJCholesky(dimension, concentration):
    # We will test against the fact that LKJCholesky can be seen as a
    # TransformedDistribution whose base distribution is a distribution of partial
    # correlations in the C-vine method (modulo an affine transform to change the
    # domain from (0, 1) to (-1, 1)) and whose transform is a signed stick-breaking
    # process.
d = dist.LKJCholesky(dimension, concentration, sample_method="cvine")
beta_sample = d._beta.sample(random.PRNGKey(0))
beta_log_prob = jnp.sum(d._beta.log_prob(beta_sample))
partial_correlation = 2 * beta_sample - 1
affine_logdet = beta_sample.shape[-1] * jnp.log(2)
sample = signed_stick_breaking_tril(partial_correlation)
# compute signed stick breaking logdet
inv_tanh = lambda t: jnp.log((1 + t) / (1 - t)) / 2 # noqa: E731
inv_tanh_logdet = jnp.sum(jnp.log(vmap(grad(inv_tanh))(partial_correlation)))
unconstrained = inv_tanh(partial_correlation)
corr_cholesky_logdet = biject_to(constraints.corr_cholesky).log_abs_det_jacobian(
unconstrained, sample
)
signed_stick_breaking_logdet = corr_cholesky_logdet + inv_tanh_logdet
actual_log_prob = d.log_prob(sample)
expected_log_prob = beta_log_prob - affine_logdet - signed_stick_breaking_logdet
assert_allclose(actual_log_prob, expected_log_prob, rtol=2e-5)
assert_allclose(jax.jit(d.log_prob)(sample), d.log_prob(sample), atol=2e-6)
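# A minimal sketch (scipy only) of the change-of-variables rule the expected
# log-prob above is assembled from: if Y = T(X), then
#   log p_Y(y) = log p_X(x) - log |T'(x)|,
# illustrated with T = exp, where log |T'(x)| = x.
import numpy as _np
from scipy.stats import lognorm as _lognorm, norm as _norm
_x0 = 0.4
_lhs = _lognorm(s=1.0).logpdf(_np.exp(_x0))  # density of exp(X), X ~ Normal(0, 1)
_rhs = _norm(0, 1).logpdf(_x0) - _x0
assert _np.allclose(_lhs, _rhs)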
def test_zero_inflated_logits_probs_agree():
concentration = np.exp(np.random.normal(1))
rate = np.exp(np.random.normal(1))
d = dist.GammaPoisson(concentration, rate)
gate_logits = np.random.normal(0)
gate_probs = expit(gate_logits)
zi_logits = dist.ZeroInflatedDistribution(d, gate_logits=gate_logits)
zi_probs = dist.ZeroInflatedDistribution(d, gate=gate_probs)
sample = np.random.randint(
0,
20,
(
1000,
100,
),
)
assert_allclose(zi_probs.log_prob(sample), zi_logits.log_prob(sample))
@pytest.mark.parametrize("rate", [0.1, 0.5, 0.9, 1.0, 1.1, 2.0, 10.0])
def test_ZIP_log_prob(rate):
# if gate is 0 ZIP is Poisson
zip_ = dist.ZeroInflatedPoisson(0.0, rate)
pois = dist.Poisson(rate)
s = zip_.sample(random.PRNGKey(0), (20,))
zip_prob = zip_.log_prob(s)
pois_prob = pois.log_prob(s)
assert_allclose(zip_prob, pois_prob, rtol=1e-6)
# if gate is 1 ZIP is Delta(0)
zip_ = dist.ZeroInflatedPoisson(1.0, rate)
delta = dist.Delta(0.0)
s = np.array([0.0, 1.0])
zip_prob = zip_.log_prob(s)
delta_prob = delta.log_prob(s)
assert_allclose(zip_prob, delta_prob, rtol=1e-6)
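# A minimal sketch (scipy only; hypothetical gate/rate) of the zero-inflated pmf
# whose two limiting cases are checked above:
#   p(x) = gate * 1[x == 0] + (1 - gate) * Poisson(rate).pmf(x)
import numpy as _np
from scipy.stats import poisson as _poisson
_gate, _rate = 0.3, 2.0
_xs = _np.arange(200)
_zip_pmf = _gate * (_xs == 0) + (1 - _gate) * _poisson(_rate).pmf(_xs)
assert _np.isclose(_zip_pmf.sum(), 1.0)  # the mixture is still normalized
assert _np.isclose(_zip_pmf[0], _gate + (1 - _gate) * _poisson(_rate).pmf(0))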
@pytest.mark.parametrize("total_count", [1, 2, 3, 10])
@pytest.mark.parametrize("shape", [(1,), (3, 1), (2, 3, 1)])
def test_beta_binomial_log_prob(total_count, shape):
concentration0 = np.exp(np.random.normal(size=shape))
concentration1 = np.exp(np.random.normal(size=shape))
value = jnp.arange(1 + total_count)
num_samples = 100000
probs = np.random.beta(concentration1, concentration0, size=(num_samples,) + shape)
log_probs = dist.Binomial(total_count, probs).log_prob(value)
expected = logsumexp(log_probs, 0) - jnp.log(num_samples)
actual = dist.BetaBinomial(concentration1, concentration0, total_count).log_prob(
value
)
assert_allclose(actual, expected, rtol=0.02)
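# The expected value above is a Monte Carlo estimate of a marginal likelihood in
# log space. A minimal sketch (numpy/scipy only) of the underlying identity,
#   log mean(exp(l)) = logsumexp(l) - log(n):
import numpy as _np
from scipy.special import logsumexp as _logsumexp
_l = _np.log(_np.random.default_rng(0).uniform(0.1, 1.0, size=10_000))
_naive = _np.log(_np.mean(_np.exp(_l)))      # direct form, prone to under/overflow
_stable = _logsumexp(_l) - _np.log(_l.size)  # numerically stable form
assert _np.allclose(_naive, _stable)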
@pytest.mark.parametrize("total_count", [1, 2, 3, 10])
@pytest.mark.parametrize("batch_shape", [(1,), (3, 1), (2, 3, 1)])
def test_dirichlet_multinomial_log_prob(total_count, batch_shape):
event_shape = (3,)
concentration = np.exp(np.random.normal(size=batch_shape + event_shape))
# test on one-hots
value = total_count * jnp.eye(event_shape[-1]).reshape(
event_shape + (1,) * len(batch_shape) + event_shape
)
num_samples = 100000
probs = dist.Dirichlet(concentration).sample(random.PRNGKey(0), (num_samples, 1))
log_probs = dist.Multinomial(total_count, probs).log_prob(value)
expected = logsumexp(log_probs, 0) - jnp.log(num_samples)
actual = dist.DirichletMultinomial(concentration, total_count).log_prob(value)
assert_allclose(actual, expected, rtol=0.05)
@pytest.mark.parametrize("shape", [(1,), (3, 1), (2, 3, 1)])
def test_gamma_poisson_log_prob(shape):
gamma_conc = np.exp(np.random.normal(size=shape))
gamma_rate = np.exp(np.random.normal(size=shape))
value = jnp.arange(15)
num_samples = 300000
poisson_rate = np.random.gamma(
gamma_conc, 1 / gamma_rate, size=(num_samples,) + shape
)
log_probs = dist.Poisson(poisson_rate).log_prob(value)
expected = logsumexp(log_probs, 0) - jnp.log(num_samples)
actual = dist.GammaPoisson(gamma_conc, gamma_rate).log_prob(value)
assert_allclose(actual, expected, rtol=0.05)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_log_prob_gradient(jax_dist, sp_dist, params):
if jax_dist in [dist.LKJ, dist.LKJCholesky]:
pytest.skip("we have separated tests for LKJCholesky distribution")
if jax_dist is _ImproperWrapper:
pytest.skip("no param for ImproperUniform to test for log_prob gradient")
rng_key = random.PRNGKey(0)
value = jax_dist(*params).sample(rng_key)
def fn(*args):
return jnp.sum(jax_dist(*args).log_prob(value))
eps = 1e-3
for i in range(len(params)):
if jax_dist is dist.EulerMaruyama and i == 1:
# skip taking grad w.r.t. sde_fn
continue
if jax_dist is _SparseCAR and i == 3:
# skip taking grad w.r.t. adj_matrix
continue
if isinstance(
params[i], dist.Distribution
): # skip taking grad w.r.t. base_dist
continue
if params[i] is None or jnp.result_type(params[i]) in (jnp.int32, jnp.int64):
continue
actual_grad = jax.grad(fn, i)(*params)
args_lhs = [p if j != i else p - eps for j, p in enumerate(params)]
args_rhs = [p if j != i else p + eps for j, p in enumerate(params)]
fn_lhs = fn(*args_lhs)
fn_rhs = fn(*args_rhs)
# finite diff approximation
expected_grad = (fn_rhs - fn_lhs) / (2.0 * eps)
assert jnp.shape(actual_grad) == jnp.shape(params[i])
if i == 0 and jax_dist is dist.Delta:
# grad w.r.t. `value` of Delta distribution will be 0
# but numerical value will give nan (= inf - inf)
expected_grad = 0.0
assert_allclose(jnp.sum(actual_grad), expected_grad, rtol=0.01, atol=0.01)
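# A minimal sketch (numpy only; hypothetical function) of the central
# finite-difference approximation used above:
#   f'(x) ~= (f(x + eps) - f(x - eps)) / (2 * eps).
import numpy as _np
def _fd_target(x):
    return _np.sin(x) * x
_x0, _eps = 0.7, 1e-5
_fd = (_fd_target(_x0 + _eps) - _fd_target(_x0 - _eps)) / (2 * _eps)
assert _np.allclose(_fd, _np.cos(_x0) * _x0 + _np.sin(_x0), rtol=1e-6)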
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_mean_var(jax_dist, sp_dist, params):
    if jax_dist is _ImproperWrapper:
        pytest.skip("Improper distribution does not have mean/var implemented")
    if jax_dist is FoldedNormal:
        pytest.skip("Folded distribution does not have mean/var implemented")
    if jax_dist is dist.EulerMaruyama:
        pytest.skip("EulerMaruyama distribution does not have mean/var implemented")
    if jax_dist is dist.RelaxedBernoulliLogits:
        pytest.skip("RelaxedBernoulli distribution does not have mean/var implemented")
    if "SineSkewed" in jax_dist.__name__:
        pytest.skip("Skewed distributions are not symmetric about the location.")
if jax_dist in (
_TruncatedNormal,
_TruncatedCauchy,
dist.LeftTruncatedDistribution,
dist.RightTruncatedDistribution,
dist.TwoSidedTruncatedDistribution,
):
pytest.skip("Truncated distributions do not has mean/var implemented")
if jax_dist is dist.ProjectedNormal:
pytest.skip("Mean is defined in submanifold")
n = (
20000
if jax_dist in [dist.LKJ, dist.LKJCholesky, dist.SineBivariateVonMises]
else 200000
)
d_jax = jax_dist(*params)
k = random.PRNGKey(0)
samples = d_jax.sample(k, sample_shape=(n,)).astype(np.float32)
# check with suitable scipy implementation if available
# XXX: VonMises is already tested below
if (
sp_dist
and not _is_batched_multivariate(d_jax)
and jax_dist
not in [dist.VonMises, dist.MultivariateStudentT, dist.MatrixNormal]
):
d_sp = sp_dist(*params)
try:
sp_mean = d_sp.mean()
except TypeError: # mvn does not have .mean() method
sp_mean = d_sp.mean
# for multivariate distns try .cov first
if d_jax.event_shape:
try:
sp_var = jnp.diag(d_sp.cov())
except TypeError: # mvn does not have .cov() method
sp_var = jnp.diag(d_sp.cov)
except AttributeError:
sp_var = d_sp.var()
else:
sp_var = d_sp.var()
assert_allclose(d_jax.mean, sp_mean, rtol=0.01, atol=1e-7)
assert_allclose(d_jax.variance, sp_var, rtol=0.01, atol=1e-7)
if jnp.all(jnp.isfinite(sp_mean)):
assert_allclose(jnp.mean(samples, 0), d_jax.mean, rtol=0.05, atol=1e-2)
if jnp.all(jnp.isfinite(sp_var)):
assert_allclose(
jnp.std(samples, 0), jnp.sqrt(d_jax.variance), rtol=0.05, atol=1e-2
)
elif jax_dist in [dist.LKJ, dist.LKJCholesky]:
if jax_dist is dist.LKJCholesky:
corr_samples = jnp.matmul(samples, jnp.swapaxes(samples, -2, -1))
else:
corr_samples = samples
dimension, concentration, _ = params
# marginal of off-diagonal entries
marginal = dist.Beta(
concentration + 0.5 * (dimension - 2), concentration + 0.5 * (dimension - 2)
)
# scale statistics due to linear mapping
marginal_mean = 2 * marginal.mean - 1
marginal_std = 2 * jnp.sqrt(marginal.variance)
expected_mean = jnp.broadcast_to(
jnp.reshape(marginal_mean, jnp.shape(marginal_mean) + (1, 1)),
jnp.shape(marginal_mean) + d_jax.event_shape,
)
expected_std = jnp.broadcast_to(
jnp.reshape(marginal_std, jnp.shape(marginal_std) + (1, 1)),
jnp.shape(marginal_std) + d_jax.event_shape,
)
# diagonal elements of correlation matrices are 1
expected_mean = expected_mean * (1 - jnp.identity(dimension)) + jnp.identity(
dimension
)
expected_std = expected_std * (1 - jnp.identity(dimension))
assert_allclose(jnp.mean(corr_samples, axis=0), expected_mean, atol=0.01)
assert_allclose(jnp.std(corr_samples, axis=0), expected_std, atol=0.01)
elif jax_dist in [dist.VonMises]:
# circular mean = sample mean
assert_allclose(d_jax.mean, jnp.mean(samples, 0), rtol=0.05, atol=1e-2)
# circular variance
x, y = jnp.mean(jnp.cos(samples), 0), jnp.mean(jnp.sin(samples), 0)
expected_variance = 1 - jnp.sqrt(x**2 + y**2)
assert_allclose(d_jax.variance, expected_variance, rtol=0.05, atol=1e-2)
elif jax_dist in [dist.SineBivariateVonMises]:
phi_loc = _circ_mean(samples[..., 0])
psi_loc = _circ_mean(samples[..., 1])
assert_allclose(
d_jax.mean, jnp.stack((phi_loc, psi_loc), axis=-1), rtol=0.05, atol=1e-2
)
elif jax_dist in [dist.MatrixNormal]:
sample_shape = (200_000,)
        # use X ~ MN(loc, U, V) then vec(X) ~ MVN(vec(loc), kron(V, U));
        # see the Kronecker identity sketch after this test
if len(d_jax.batch_shape) > 0:
axes = [len(sample_shape) + i for i in range(len(d_jax.batch_shape))]
axes = tuple(axes)
samples_re = jnp.moveaxis(samples, axes, jnp.arange(len(axes)))
subshape = samples_re.shape[: len(axes)]
ixi = product(*[range(k) for k in subshape])
for ix in ixi:
# mean
def get_min_shape(ix, batch_shape):
return min(ix, tuple(map(lambda x: x - 1, batch_shape)))
ix_loc = get_min_shape(ix, d_jax.loc.shape[: len(ix)])
jnp.allclose(
jnp.mean(samples_re[ix], 0),
jnp.squeeze(d_jax.mean[ix_loc]),
rtol=0.5,
atol=1e-2,
)
# cov
samples_mvn = jnp.squeeze(samples_re[ix]).reshape(
sample_shape + (-1,), order="F"
)
ix_col = get_min_shape(ix, d_jax.scale_tril_column.shape[: len(ix)])
ix_row = get_min_shape(ix, d_jax.scale_tril_row.shape[: len(ix)])
scale_tril = my_kron(
d_jax.scale_tril_column[ix_col],
d_jax.scale_tril_row[ix_row],
)
sample_scale_tril = jnp.linalg.cholesky(jnp.cov(samples_mvn.T))
jnp.allclose(sample_scale_tril, scale_tril, atol=0.5, rtol=1e-2)
else: # unbatched
# mean
jnp.allclose(
jnp.mean(samples, 0),
jnp.squeeze(d_jax.mean),
rtol=0.5,
atol=1e-2,
)
# cov
samples_mvn = jnp.squeeze(samples).reshape(sample_shape + (-1,), order="F")
scale_tril = my_kron(
jnp.squeeze(d_jax.scale_tril_column), jnp.squeeze(d_jax.scale_tril_row)
)
sample_scale_tril = jnp.linalg.cholesky(jnp.cov(samples_mvn.T))
jnp.allclose(sample_scale_tril, scale_tril, atol=0.5, rtol=1e-2)
else:
if jnp.all(jnp.isfinite(d_jax.mean)):
assert_allclose(jnp.mean(samples, 0), d_jax.mean, rtol=0.05, atol=1e-2)
if isinstance(d_jax, dist.CAR):
pytest.skip("CAR distribution does not have `variance` implemented.")
if isinstance(d_jax, dist.Gompertz):
pytest.skip("Gompertz distribution does not have `variance` implemented.")
if jnp.all(jnp.isfinite(d_jax.variance)):
assert_allclose(
jnp.std(samples, 0), jnp.sqrt(d_jax.variance), rtol=0.05, atol=1e-2
)
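# A minimal sketch (numpy only; hypothetical covariances) of the vec/Kronecker
# identity the MatrixNormal branch above relies on: if A A^T = U and B B^T = V,
# then kron(B, A) @ kron(B, A).T = kron(V, U), the covariance of vec(X) for
# X ~ MN(loc, U, V) with column-major vec.
import numpy as _np
_U = _np.array([[2.0, 0.3], [0.3, 1.0]])    # row covariance
_V = _np.array([[1.5, -0.2], [-0.2, 0.8]])  # column covariance
_A, _B = _np.linalg.cholesky(_U), _np.linalg.cholesky(_V)
_BA = _np.kron(_B, _A)
assert _np.allclose(_BA @ _BA.T, _np.kron(_V, _U))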
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
@pytest.mark.parametrize("prepend_shape", [(), (2,), (2, 3)])
def test_distribution_constraints(jax_dist, sp_dist, params, prepend_shape):
if jax_dist in (
_TruncatedNormal,
_TruncatedCauchy,
_GaussianMixture,
_Gaussian2DMixture,
_GeneralMixture,
_General2DMixture,
):
pytest.skip(f"{jax_dist.__name__} is a function, not a class")
    dist_args = inspect.getfullargspec(jax_dist.__init__)[0][1:]
valid_params, oob_params = list(params), list(params)
key = random.PRNGKey(1)
dependent_constraint = False
for i in range(len(params)):
if (
jax_dist in (_ImproperWrapper, dist.LKJ, dist.LKJCholesky)
and dist_args[i] != "concentration"
):
continue
if "SineSkewed" in jax_dist.__name__ and dist_args[i] != "skewness":
continue
if jax_dist is dist.EulerMaruyama and dist_args[i] != "t":
continue
if (
jax_dist is dist.TwoSidedTruncatedDistribution
and dist_args[i] == "base_dist"
):
continue
if jax_dist is dist.GaussianRandomWalk and dist_args[i] == "num_steps":
continue
if (
jax_dist is dist.SineBivariateVonMises
and dist_args[i] == "weighted_correlation"
):
continue
if params[i] is None:
oob_params[i] = None
valid_params[i] = None
continue
constraint = jax_dist.arg_constraints[dist_args[i]]
if isinstance(constraint, constraints._Dependent):
dependent_constraint = True
break
key, key_gen = random.split(key)
oob_params[i] = gen_values_outside_bounds(
constraint, jnp.shape(params[i]), key_gen
)
valid_params[i] = gen_values_within_bounds(
constraint, jnp.shape(params[i]), key_gen
)
if jax_dist is dist.MultivariateStudentT:
        # Since the mean is only defined for df > 1 and we instantiate
        # scipy.stats.multivariate_t with the same mean as jax_dist,
        # we need to ensure it is defined, so force df > 1.
valid_params[0] += 1
if jax_dist is dist.LogUniform:
        # scipy.stats.loguniform takes parameters a and b
        # with a > 0 and b > a, but gen_values_within_bounds()
        # only guarantees a > 0 and b > 0, so set b = a + b.
valid_params[1] += valid_params[0]
assert jax_dist(*oob_params)
# Invalid parameter values throw ValueError
if not dependent_constraint and (
jax_dist is not _ImproperWrapper and "SineSkewed" not in jax_dist.__name__
):
with pytest.raises(ValueError):
jax_dist(*oob_params, validate_args=True)
with pytest.raises(ValueError):
# test error raised under jit omnistaging
oob_params = jax.device_get(oob_params)
def dist_gen_fn():
d = jax_dist(*oob_params, validate_args=True)
return d
jax.jit(dist_gen_fn)()
d = jax_dist(*valid_params, validate_args=True)
# Test agreement of log density evaluation on randomly generated samples
# with scipy's implementation when available.
if (
sp_dist
and not _is_batched_multivariate(d)
and not (d.event_shape and prepend_shape)
):
valid_samples = gen_values_within_bounds(
d.support, size=prepend_shape + d.batch_shape + d.event_shape
)
try:
expected = sp_dist(*valid_params).logpdf(valid_samples)
except AttributeError:
expected = sp_dist(*valid_params).logpmf(valid_samples)
assert_allclose(d.log_prob(valid_samples), expected, atol=1e-5, rtol=1e-5)
# Out of support samples throw ValueError
oob_samples = gen_values_outside_bounds(
d.support, size=prepend_shape + d.batch_shape + d.event_shape
)
with pytest.warns(UserWarning, match="Out-of-support"):
d.log_prob(oob_samples)
with pytest.warns(UserWarning, match="Out-of-support"):
# test warning work under jit omnistaging
oob_samples = jax.device_get(oob_samples)
valid_params = jax.device_get(valid_params)
def log_prob_fn():
d = jax_dist(*valid_params, validate_args=True)
return d.log_prob(oob_samples)
jax.jit(log_prob_fn)()
def test_omnistaging_invalid_param():
def f(x):
return dist.LogNormal(x, -np.ones(2), validate_args=True).log_prob(0)
with pytest.raises(ValueError, match="got invalid"):
jax.jit(f)(0)
def test_omnistaging_invalid_sample():
def f(x):
return dist.LogNormal(x, np.ones(2), validate_args=True).log_prob(-1)
with pytest.warns(UserWarning, match="Out-of-support"):
jax.jit(f)(0)
def test_categorical_log_prob_grad():
data = jnp.repeat(jnp.arange(3), 10)
def f(x):
return (
dist.Categorical(jax.nn.softmax(x * jnp.arange(1, 4))).log_prob(data).sum()
)
def g(x):
return dist.Categorical(logits=x * jnp.arange(1, 4)).log_prob(data).sum()
x = 0.5
fx, grad_fx = jax.value_and_grad(f)(x)
gx, grad_gx = jax.value_and_grad(g)(x)
assert_allclose(fx, gx, rtol=1e-6)
assert_allclose(grad_fx, grad_gx, atol=1e-4)
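# A minimal sketch (scipy only) of why f and g above agree: for a categorical,
#   log softmax(l)_k = l_k - logsumexp(l),
# so parametrizing by probs=softmax(l) or by logits=l yields identical log-probs.
import numpy as _np
from scipy.special import logsumexp as _logsumexp, softmax as _softmax
_l = 0.5 * _np.arange(1, 4)
assert _np.allclose(_np.log(_softmax(_l)), _l - _logsumexp(_l))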
def test_beta_proportion_invalid_mean():
with dist.distribution.validation_enabled(), pytest.raises(
ValueError, match=r"^BetaProportion distribution got invalid mean parameter\.$"
):
dist.BetaProportion(1.0, 1.0)
########################################
# Tests for constraints and transforms #
########################################
@pytest.mark.parametrize(
"constraint, x, expected",
[
(constraints.boolean, np.array([True, False]), np.array([True, True])),
(constraints.boolean, np.array([1, 1]), np.array([True, True])),
(constraints.boolean, np.array([-1, 1]), np.array([False, True])),
(
constraints.corr_cholesky,
np.array([[[1, 0], [0, 1]], [[1, 0.1], [0, 1]]]),
np.array([True, False]),
), # NB: not lower_triangular
(
constraints.corr_cholesky,
np.array([[[1, 0], [1, 0]], [[1, 0], [0.5, 0.5]]]),
np.array([False, False]),
), # NB: not positive_diagonal & not unit_norm_row
(
constraints.corr_matrix,
np.array([[[1, 0], [0, 1]], [[1, 0.1], [0, 1]]]),
np.array([True, False]),
), # NB: not lower_triangular
(
constraints.corr_matrix,
np.array([[[1, 0], [1, 0]], [[1, 0], [0.5, 0.5]]]),
np.array([False, False]),
), # NB: not unit diagonal
(constraints.greater_than(1), 3, True),
(
constraints.greater_than(1),
np.array([-1, 1, 5]),
np.array([False, False, True]),
),
(constraints.integer_interval(-3, 5), 0, True),
(
constraints.integer_interval(-3, 5),
np.array([-5, -3, 0, 1.1, 5, 7]),
np.array([False, True, True, False, True, False]),
),
(constraints.interval(-3, 5), 0, True),
(
constraints.interval(-3, 5),
np.array([-5, -3, 0, 5, 7]),
np.array([False, True, True, True, False]),
),
(constraints.less_than(1), -2, True),
(
constraints.less_than(1),
np.array([-1, 1, 5]),
np.array([True, False, False]),
),
(constraints.lower_cholesky, np.array([[1.0, 0.0], [-2.0, 0.1]]), True),
(
constraints.lower_cholesky,
np.array([[[1.0, 0.0], [-2.0, -0.1]], [[1.0, 0.1], [2.0, 0.2]]]),
np.array([False, False]),
),
(constraints.nonnegative_integer, 3, True),
(
constraints.nonnegative_integer,
np.array([-1.0, 0.0, 5.0]),
np.array([False, True, True]),
),
(constraints.positive, 3, True),
(constraints.positive, np.array([-1, 0, 5]), np.array([False, False, True])),
(constraints.positive_definite, np.array([[1.0, 0.3], [0.3, 1.0]]), True),
(
constraints.positive_definite,
np.array([[[2.0, 0.4], [0.3, 2.0]], [[1.0, 0.1], [0.1, 0.0]]]),
np.array([False, False]),
),
(constraints.positive_integer, 3, True),
(
constraints.positive_integer,
np.array([-1.0, 0.0, 5.0]),
np.array([False, False, True]),
),
(constraints.real, -1, True),
(
constraints.real,
np.array([np.inf, -np.inf, np.nan, np.pi]),
np.array([False, False, False, True]),
),
(constraints.simplex, np.array([0.1, 0.3, 0.6]), True),
(
constraints.simplex,
np.array([[0.1, 0.3, 0.6], [-0.1, 0.6, 0.5], [0.1, 0.6, 0.5]]),
np.array([True, False, False]),
),
(constraints.softplus_positive, 3, True),
(
constraints.softplus_positive,
np.array([-1, 0, 5]),
np.array([False, False, True]),
),
(
constraints.softplus_lower_cholesky,
np.array([[1.0, 0.0], [-2.0, 0.1]]),
True,
),
(
constraints.softplus_lower_cholesky,
np.array([[[1.0, 0.0], [-2.0, -0.1]], [[1.0, 0.1], [2.0, 0.2]]]),
np.array([False, False]),
),
(constraints.unit_interval, 0.1, True),
(
constraints.unit_interval,
np.array([-5, 0, 0.5, 1, 7]),
np.array([False, True, True, True, False]),
),
(
constraints.sphere,
np.array([[1, 0, 0], [0.5, 0.5, 0]]),
np.array([True, False]),
),
(
constraints.open_interval(0.0, 1.0),
np.array([-5, 0, 0.5, 1, 7]),
np.array([False, False, True, False, False]),
),
],
)
def test_constraints(constraint, x, expected):
v = constraint.feasible_like(x)
    if jnp.result_type(v) in ("float32", "float64"):
assert not constraint.is_discrete
assert_array_equal(constraint(x), expected)
feasible_value = constraint.feasible_like(x)
assert jnp.shape(feasible_value) == jnp.shape(x)
assert_allclose(constraint(feasible_value), jnp.full(jnp.shape(expected), True))
try:
inverse = biject_to(constraint).inv(feasible_value)
except NotImplementedError:
pass
else:
assert_allclose(inverse, jnp.zeros_like(inverse), atol=2e-7)
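# A minimal sketch (reusing the module's `biject_to`/`constraints` imports) of
# the round trip verified above: a constraint acts as a boolean membership
# check, and biject_to(constraint) maps unconstrained reals into its support.
_t_pos = biject_to(constraints.positive)
_u = jnp.array([-1.0, 0.0, 2.0])
_v = _t_pos(_u)
assert bool(constraints.positive(_v).all())     # mapped values are in-support
assert_allclose(_t_pos.inv(_v), _u, atol=1e-6)  # and the map inverts exactly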
@pytest.mark.parametrize(
"constraint",
[
constraints.corr_cholesky,
constraints.corr_matrix,
constraints.greater_than(2),
constraints.interval(-3, 5),
constraints.l1_ball,
constraints.less_than(1),
constraints.lower_cholesky,
constraints.scaled_unit_lower_cholesky,
constraints.ordered_vector,
constraints.positive,
constraints.positive_definite,
constraints.positive_ordered_vector,
constraints.real,
constraints.real_vector,
constraints.simplex,
constraints.softplus_positive,
constraints.softplus_lower_cholesky,
constraints.unit_interval,
constraints.open_interval(0.0, 1.0),
],
ids=lambda x: x.__class__,
)
@pytest.mark.parametrize("shape", [(), (1,), (3,), (6,), (3, 1), (1, 3), (5, 3)])
def test_biject_to(constraint, shape):
transform = biject_to(constraint)
event_dim = transform.domain.event_dim
if isinstance(constraint, constraints._Interval):
assert transform.codomain.upper_bound == constraint.upper_bound
assert transform.codomain.lower_bound == constraint.lower_bound
elif isinstance(constraint, constraints._GreaterThan):
assert transform.codomain.lower_bound == constraint.lower_bound
elif isinstance(constraint, constraints._LessThan):
assert transform.codomain.upper_bound == constraint.upper_bound
if len(shape) < event_dim:
return
rng_key = random.PRNGKey(0)
x = random.normal(rng_key, shape)
y = transform(x)
assert transform.forward_shape(x.shape) == y.shape
assert transform.inverse_shape(y.shape) == x.shape
# test inv work for NaN arrays:
x_nan = transform.inv(jnp.full(jnp.shape(y), np.nan))
assert x_nan.shape == x.shape
# test codomain
batch_shape = shape if event_dim == 0 else shape[:-1]
assert_array_equal(transform.codomain(y), jnp.ones(batch_shape, dtype=jnp.bool_))
# test inv
z = transform.inv(y)
assert_allclose(x, z, atol=1e-5, rtol=1e-5)
# test domain, currently all is constraints.real or constraints.real_vector
assert_array_equal(transform.domain(z), jnp.ones(batch_shape))
# test log_abs_det_jacobian
actual = transform.log_abs_det_jacobian(x, y)
assert jnp.shape(actual) == batch_shape
if len(shape) == event_dim:
if constraint is constraints.simplex:
expected = np.linalg.slogdet(jax.jacobian(transform)(x)[:-1, :])[1]
inv_expected = np.linalg.slogdet(jax.jacobian(transform.inv)(y)[:, :-1])[1]
elif constraint in [
constraints.real_vector,
constraints.ordered_vector,
constraints.positive_ordered_vector,
constraints.l1_ball,
]:
expected = np.linalg.slogdet(jax.jacobian(transform)(x))[1]
inv_expected = np.linalg.slogdet(jax.jacobian(transform.inv)(y))[1]
elif constraint in [constraints.corr_cholesky, constraints.corr_matrix]:
vec_transform = lambda x: matrix_to_tril_vec( # noqa: E731
transform(x), diagonal=-1
)
y_tril = matrix_to_tril_vec(y, diagonal=-1)
def inv_vec_transform(y):
matrix = vec_to_tril_matrix(y, diagonal=-1)
if constraint is constraints.corr_matrix:
# fill the upper triangular part
matrix = (
matrix
+ jnp.swapaxes(matrix, -2, -1)
+ jnp.identity(matrix.shape[-1])
)
return transform.inv(matrix)
expected = np.linalg.slogdet(jax.jacobian(vec_transform)(x))[1]
inv_expected = np.linalg.slogdet(jax.jacobian(inv_vec_transform)(y_tril))[1]
elif constraint in [
constraints.lower_cholesky,
constraints.scaled_unit_lower_cholesky,
constraints.positive_definite,
constraints.softplus_lower_cholesky,
]:
vec_transform = lambda x: matrix_to_tril_vec(transform(x)) # noqa: E731
y_tril = matrix_to_tril_vec(y)
def inv_vec_transform(y):
matrix = vec_to_tril_matrix(y)
if constraint is constraints.positive_definite:
# fill the upper triangular part
matrix = (
matrix
+ jnp.swapaxes(matrix, -2, -1)
- jnp.diag(jnp.diag(matrix))
)
return transform.inv(matrix)
expected = np.linalg.slogdet(jax.jacobian(vec_transform)(x))[1]
inv_expected = np.linalg.slogdet(jax.jacobian(inv_vec_transform)(y_tril))[1]
else:
expected = jnp.log(jnp.abs(grad(transform)(x)))
inv_expected = jnp.log(jnp.abs(grad(transform.inv)(y)))
assert_allclose(actual, expected, atol=1e-5, rtol=1e-5)
assert_allclose(actual, -inv_expected, atol=1e-5, rtol=1e-5)
# NB: skip transforms which are tested in `test_biject_to`
@pytest.mark.parametrize(
"transform, event_shape",
[
(PermuteTransform(np.array([3, 0, 4, 1, 2])), (5,)),
(PowerTransform(2.0), ()),
(SoftplusTransform(), ()),
(
LowerCholeskyAffine(
np.array([1.0, 2.0]), np.array([[0.6, 0.0], [1.5, 0.4]])
),
(2,),
),
(
transforms.ComposeTransform(
[
biject_to(constraints.simplex),
SimplexToOrderedTransform(0.0),
biject_to(constraints.ordered_vector).inv,
]
),
(5,),
),
],
)
@pytest.mark.parametrize(
"batch_shape",
[
(),
(1,),
(3,),
(6,),
(3, 1),
(1, 3),
(5, 3),
],
)
def test_bijective_transforms(transform, event_shape, batch_shape):
shape = batch_shape + event_shape
rng_key = random.PRNGKey(0)
x = biject_to(transform.domain)(random.normal(rng_key, shape))
y = transform(x)
# test codomain
assert_array_equal(transform.codomain(y), jnp.ones(batch_shape))
# test inv
z = transform.inv(y)
    assert_allclose(x, z, atol=1e-6, rtol=1e-4)
assert transform.inv.inv is transform
assert transform.inv is transform.inv
assert transform.domain is transform.inv.codomain
assert transform.codomain is transform.inv.domain
# test domain
assert_array_equal(transform.domain(z), jnp.ones(batch_shape))
# test log_abs_det_jacobian
actual = transform.log_abs_det_jacobian(x, y)
assert_allclose(actual, -transform.inv.log_abs_det_jacobian(y, x))
assert jnp.shape(actual) == batch_shape
if len(shape) == transform.domain.event_dim:
if len(event_shape) == 1:
expected = np.linalg.slogdet(jax.jacobian(transform)(x))[1]
inv_expected = np.linalg.slogdet(jax.jacobian(transform.inv)(y))[1]
else:
expected = jnp.log(jnp.abs(grad(transform)(x)))
inv_expected = jnp.log(jnp.abs(grad(transform.inv)(y)))
assert_allclose(actual, expected, atol=1e-6)
assert_allclose(actual, -inv_expected, atol=1e-6)
@pytest.mark.parametrize("batch_shape", [(), (5,)])
def test_composed_transform(batch_shape):
t1 = transforms.AffineTransform(0, 2)
t2 = transforms.LowerCholeskyTransform()
t = transforms.ComposeTransform([t1, t2, t1])
assert t.domain.event_dim == 1
assert t.codomain.event_dim == 2
x = np.random.normal(size=batch_shape + (6,))
y = t(x)
log_det = t.log_abs_det_jacobian(x, y)
assert log_det.shape == batch_shape
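    # Term by term: the first affine scales the 6-vector by 2 (6 * log 2), t2's
    # own log-determinant is evaluated at its input x * 2 and output y / 2, and
    # the final affine scales all 9 entries of the 3x3 matrix by 2 (9 * log 2).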
expected_log_det = (
jnp.log(2) * 6 + t2.log_abs_det_jacobian(x * 2, y / 2) + jnp.log(2) * 9
)
assert_allclose(log_det, expected_log_det)
@pytest.mark.parametrize("batch_shape", [(), (5,)])
def test_composed_transform_1(batch_shape):
t1 = transforms.AffineTransform(0, 2)
t2 = transforms.LowerCholeskyTransform()
t = transforms.ComposeTransform([t1, t2, t2])
assert t.domain.event_dim == 1
assert t.codomain.event_dim == 3
x = np.random.normal(size=batch_shape + (6,))
y = t(x)
log_det = t.log_abs_det_jacobian(x, y)
assert log_det.shape == batch_shape
z = t2(x * 2)
expected_log_det = (
jnp.log(2) * 6
+ t2.log_abs_det_jacobian(x * 2, z)
+ t2.log_abs_det_jacobian(z, t2(z)).sum(-1)
)
assert_allclose(log_det, expected_log_det)
@pytest.mark.parametrize("batch_shape", [(), (5,)])
def test_simplex_to_order_transform(batch_shape):
simplex = jnp.arange(5.0) / jnp.arange(5.0).sum()
simplex = jnp.broadcast_to(simplex, batch_shape + simplex.shape)
transform = SimplexToOrderedTransform()
out = transform(simplex)
assert out.shape == transform.forward_shape(simplex.shape)
assert simplex.shape == transform.inverse_shape(out.shape)
@pytest.mark.parametrize("batch_shape", [(), (5,)])
@pytest.mark.parametrize("prepend_event_shape", [(), (4,)])
@pytest.mark.parametrize("sample_shape", [(), (7,)])
def test_transformed_distribution(batch_shape, prepend_event_shape, sample_shape):
base_dist = (
dist.Normal(0, 1)
.expand(batch_shape + prepend_event_shape + (6,))
.to_event(1 + len(prepend_event_shape))
)
t1 = transforms.AffineTransform(0, 2)
t2 = transforms.LowerCholeskyTransform()
d = dist.TransformedDistribution(base_dist, [t1, t2, t1])
assert d.event_dim == 2 + len(prepend_event_shape)
y = d.sample(random.PRNGKey(0), sample_shape)
t = transforms.ComposeTransform([t1, t2, t1])
x = t.inv(y)
assert x.shape == sample_shape + base_dist.shape()
log_prob = d.log_prob(y)
assert log_prob.shape == sample_shape + batch_shape
t_log_det = t.log_abs_det_jacobian(x, y)
if prepend_event_shape:
t_log_det = t_log_det.sum(-1)
expected_log_prob = base_dist.log_prob(x) - t_log_det
assert_allclose(log_prob, expected_log_prob, atol=1e-5)
@pytest.mark.parametrize(
"transformed_dist",
[
dist.TransformedDistribution(
dist.Normal(np.array([2.0, 3.0]), 1.0), transforms.ExpTransform()
),
dist.TransformedDistribution(
dist.Exponential(jnp.ones(2)),
[
transforms.PowerTransform(0.7),
transforms.AffineTransform(0.0, jnp.ones(2) * 3),
],
),
],
)
def test_transformed_distribution_intermediates(transformed_dist):
sample, intermediates = transformed_dist.sample_with_intermediates(
random.PRNGKey(1)
)
assert_allclose(
transformed_dist.log_prob(sample, intermediates),
transformed_dist.log_prob(sample),
)
def test_transformed_transformed_distribution():
loc, scale = -2, 3
dist1 = dist.TransformedDistribution(
dist.Normal(2, 3), transforms.PowerTransform(2.0)
)
dist2 = dist.TransformedDistribution(dist1, transforms.AffineTransform(-2, 3))
assert isinstance(dist2.base_dist, dist.Normal)
assert len(dist2.transforms) == 2
assert isinstance(dist2.transforms[0], transforms.PowerTransform)
assert isinstance(dist2.transforms[1], transforms.AffineTransform)
rng_key = random.PRNGKey(0)
assert_allclose(loc + scale * dist1.sample(rng_key), dist2.sample(rng_key))
intermediates = dist2.sample_with_intermediates(rng_key)
assert len(intermediates) == 2
def _make_iaf(input_dim, hidden_dims, rng_key):
arn_init, arn = AutoregressiveNN(input_dim, hidden_dims, param_dims=[1, 1])
_, init_params = arn_init(rng_key, (input_dim,))
return InverseAutoregressiveTransform(partial(arn, init_params))
@pytest.mark.parametrize(
"ts",
[
[transforms.PowerTransform(0.7), transforms.AffineTransform(2.0, 3.0)],
[transforms.ExpTransform()],
[
transforms.ComposeTransform(
[transforms.AffineTransform(-2, 3), transforms.ExpTransform()]
),
transforms.PowerTransform(3.0),
],
[
_make_iaf(5, hidden_dims=[10], rng_key=random.PRNGKey(0)),
transforms.PermuteTransform(jnp.arange(5)[::-1]),
_make_iaf(5, hidden_dims=[10], rng_key=random.PRNGKey(1)),
],
],
)
def test_compose_transform_with_intermediates(ts):
transform = transforms.ComposeTransform(ts)
x = random.normal(random.PRNGKey(2), (7, 5))
y, intermediates = transform.call_with_intermediates(x)
logdet = transform.log_abs_det_jacobian(x, y, intermediates)
assert_allclose(y, transform(x))
assert_allclose(logdet, transform.log_abs_det_jacobian(x, y))
@pytest.mark.parametrize("x_dim, y_dim", [(3, 3), (3, 4)])
def test_unpack_transform(x_dim, y_dim):
xy = np.random.randn(x_dim + y_dim)
unpack_fn = lambda xy: {"x": xy[:x_dim], "y": xy[x_dim:]} # noqa: E731
transform = transforms.UnpackTransform(unpack_fn)
z = transform(xy)
if x_dim == y_dim:
with pytest.warns(UserWarning, match="UnpackTransform.inv"):
t = transform.inv(z)
else:
t = transform.inv(z)
assert_allclose(t, xy)
@pytest.mark.parametrize("jax_dist, sp_dist, params", CONTINUOUS)
def test_generated_sample_distribution(
jax_dist, sp_dist, params, N_sample=100_000, key=random.PRNGKey(11)
):
"""On samplers that we do not get directly from JAX, (e.g. we only get
Gumbel(0,1) but also provide samplers for Gumbel(loc, scale)), also test
agreement in the empirical distribution of generated samples between our
samplers and those from SciPy.
"""
if jax_dist not in [dist.Gumbel]:
pytest.skip(
"{} sampling method taken from upstream, no need to"
"test generated samples.".format(jax_dist.__name__)
)
jax_dist = jax_dist(*params)
if sp_dist and not jax_dist.event_shape and not jax_dist.batch_shape:
our_samples = jax_dist.sample(key, (N_sample,))
ks_result = osp.kstest(our_samples, sp_dist(*params).cdf)
assert ks_result.pvalue > 0.05
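# A minimal sketch (numpy/scipy only; hypothetical sample size and a loose
# threshold to keep the sketch robust) of the Kolmogorov-Smirnov check above:
# draw from one implementation and compare against another's CDF.
import numpy as _np
import scipy.stats as _stats
_draws = _np.random.default_rng(0).gumbel(loc=0.0, scale=1.0, size=10_000)
assert _stats.kstest(_draws, _stats.gumbel_r(loc=0.0, scale=1.0).cdf).pvalue > 0.001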
@pytest.mark.parametrize(
"jax_dist, params, support",
[
(dist.BernoulliLogits, (5.0,), jnp.arange(2)),
(dist.BernoulliProbs, (0.5,), jnp.arange(2)),
(dist.BinomialLogits, (4.5, 10), jnp.arange(11)),
(dist.BinomialProbs, (0.5, 11), jnp.arange(12)),
(dist.BetaBinomial, (2.0, 0.5, 12), jnp.arange(13)),
(dist.CategoricalLogits, (np.array([3.0, 4.0, 5.0]),), jnp.arange(3)),
(dist.CategoricalProbs, (np.array([0.1, 0.5, 0.4]),), jnp.arange(3)),
],
)
@pytest.mark.parametrize("batch_shape", [(5,), ()])
@pytest.mark.parametrize("expand", [False, True])
def test_enumerate_support_smoke(jax_dist, params, support, batch_shape, expand):
p0 = jnp.broadcast_to(params[0], batch_shape + jnp.shape(params[0]))
actual = jax_dist(p0, *params[1:]).enumerate_support(expand=expand)
expected = support.reshape((-1,) + (1,) * len(batch_shape))
if expand:
expected = jnp.broadcast_to(expected, support.shape + batch_shape)
assert_allclose(actual, expected)
def test_zero_inflated_enumerate_support():
base_dist = dist.Bernoulli(0.5)
d = dist.ZeroInflatedDistribution(base_dist, gate=0.5)
assert d.has_enumerate_support
assert_allclose(d.enumerate_support(), base_dist.enumerate_support())
@pytest.mark.parametrize("jax_dist, sp_dist, params", CONTINUOUS + DISCRETE)
@pytest.mark.parametrize("prepend_shape", [(), (2, 3)])
@pytest.mark.parametrize("sample_shape", [(), (4,)])
def test_expand(jax_dist, sp_dist, params, prepend_shape, sample_shape):
jax_dist = jax_dist(*params)
new_batch_shape = prepend_shape + jax_dist.batch_shape
expanded_dist = jax_dist.expand(new_batch_shape)
rng_key = random.PRNGKey(0)
samples = expanded_dist.sample(rng_key, sample_shape)
assert expanded_dist.batch_shape == new_batch_shape
assert samples.shape == sample_shape + new_batch_shape + jax_dist.event_shape
assert expanded_dist.log_prob(samples).shape == sample_shape + new_batch_shape
# test expand of expand
assert (
expanded_dist.expand((3,) + new_batch_shape).batch_shape
== (3,) + new_batch_shape
)
# test expand error
if prepend_shape:
with pytest.raises(ValueError, match="Cannot broadcast distribution of shape"):
assert expanded_dist.expand((3,) + jax_dist.batch_shape)
@pytest.mark.parametrize("base_shape", [(2, 1, 5), (3, 1), (2, 1, 1), (1, 1, 5)])
@pytest.mark.parametrize("event_dim", [0, 1, 2, 3])
@pytest.mark.parametrize("sample_shape", [(1000,), (1000, 7, 1), (1000, 1, 7)])
def test_expand_shuffle_regression(base_shape, event_dim, sample_shape):
expand_shape = (2, 3, 5)
event_dim = min(event_dim, len(base_shape))
loc = random.normal(random.PRNGKey(0), base_shape) * 10
base_dist = dist.Normal(loc, 0.1).to_event(event_dim)
expanded_dist = base_dist.expand(expand_shape[: len(expand_shape) - event_dim])
samples = expanded_dist.sample(random.PRNGKey(1), sample_shape)
expected_mean = jnp.broadcast_to(loc, sample_shape[1:] + expanded_dist.shape())
assert_allclose(samples.mean(0), expected_mean, atol=0.1)
@pytest.mark.parametrize("batch_shape", [(), (4,), (10, 3)])
def test_sine_bivariate_von_mises_batch_shape(batch_shape):
phi_loc = jnp.broadcast_to(jnp.array(0.0), batch_shape)
psi_loc = jnp.array(0.0)
phi_conc = jnp.array(1.0)
psi_conc = jnp.array(1.0)
corr = jnp.array(0.1)
sine = SineBivariateVonMises(phi_loc, psi_loc, phi_conc, psi_conc, corr)
assert sine.batch_shape == batch_shape
samples = sine.sample(random.PRNGKey(0))
assert samples.shape == (*batch_shape, 2)
def test_sine_bivariate_von_mises_sample_mean():
loc = jnp.array([[2.0, -1.0], [-2, 1.0]])
sine = SineBivariateVonMises(*loc, 5000, 5000, 0.0)
samples = sine.sample(random.PRNGKey(0), (5000,))
assert_allclose(_circ_mean(samples).T, loc, rtol=5e-3)
@pytest.mark.parametrize("batch_shape", [(), (4,)])
def test_polya_gamma(batch_shape, num_points=20000):
d = dist.TruncatedPolyaGamma(batch_shape=batch_shape)
rng_key = random.PRNGKey(0)
# test density approximately normalized
x = jnp.linspace(1.0e-6, d.truncation_point, num_points)
prob = (d.truncation_point / num_points) * jnp.exp(
logsumexp(d.log_prob(x), axis=-1)
)
assert_allclose(prob, jnp.ones(batch_shape), rtol=1.0e-4)
# test mean of approximate sampler
z = d.sample(rng_key, sample_shape=(3000,))
mean = jnp.mean(z, axis=-1)
assert_allclose(mean, 0.25 * jnp.ones(batch_shape), rtol=0.07)
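# A minimal sketch (scipy only; hypothetical density) of the Riemann-sum
# normalization check above: over a support of width w discretized into n
# points, (w / n) * sum(pdf) should be approximately 1.
import numpy as _np
from scipy.stats import beta as _beta
_n_grid = 20_000
_grid = _np.linspace(1e-6, 1 - 1e-6, _n_grid)
assert _np.allclose((1.0 / _n_grid) * _beta(2.0, 3.0).pdf(_grid).sum(), 1.0, rtol=1e-3)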
@pytest.mark.parametrize(
"extra_event_dims,expand_shape",
[(0, (4, 3, 2, 1)), (0, (4, 3, 2, 2)), (1, (5, 4, 3, 2)), (2, (5, 4, 3))],
)
def test_expand_reshaped_distribution(extra_event_dims, expand_shape):
loc = jnp.zeros((1, 6))
scale_tril = jnp.eye(6)
d = dist.MultivariateNormal(loc, scale_tril=scale_tril)
full_shape = (4, 1, 1, 1, 6)
reshaped_dist = d.expand([4, 1, 1, 1]).to_event(extra_event_dims)
cut = 4 - extra_event_dims
batch_shape, event_shape = full_shape[:cut], full_shape[cut:]
assert reshaped_dist.batch_shape == batch_shape
assert reshaped_dist.event_shape == event_shape
large = reshaped_dist.expand(expand_shape)
assert large.batch_shape == expand_shape
assert large.event_shape == event_shape
# Throws error when batch shape cannot be broadcasted
with pytest.raises((RuntimeError, ValueError)):
reshaped_dist.expand(expand_shape + (3,))
# Throws error when trying to shrink existing batch shape
with pytest.raises((RuntimeError, ValueError)):
large.expand(expand_shape[1:])
@pytest.mark.parametrize(
"batch_shape, mask_shape",
[((), ()), ((2,), ()), ((), (2,)), ((2,), (2,)), ((4, 2), (1, 2)), ((2,), (4, 2))],
)
@pytest.mark.parametrize("event_shape", [(), (3,)])
def test_mask(batch_shape, event_shape, mask_shape):
jax_dist = (
dist.Normal().expand(batch_shape + event_shape).to_event(len(event_shape))
)
mask = dist.Bernoulli(0.5).sample(random.PRNGKey(0), mask_shape)
if mask_shape == ():
mask = bool(mask)
samples = jax_dist.sample(random.PRNGKey(1))
actual = jax_dist.mask(mask).log_prob(samples)
assert_allclose(
actual != 0,
jnp.broadcast_to(mask, lax.broadcast_shapes(batch_shape, mask_shape)),
)
@pytest.mark.parametrize("event_shape", [(), (4,), (2, 4)])
def test_mask_grad(event_shape):
def f(x, data):
base_dist = dist.Beta(jnp.exp(x), jnp.ones(event_shape)).to_event()
mask = jnp.all(
jnp.isfinite(data), tuple(-i - 1 for i in range(len(event_shape)))
)
log_prob = base_dist.mask(mask).log_prob(data)
assert log_prob.shape == data.shape[: len(data.shape) - len(event_shape)]
return log_prob.sum()
data = np.array([[0.4, np.nan, 0.2, np.nan], [0.5, 0.5, 0.5, 0.5]])
log_prob, grad = jax.value_and_grad(f)(1.0, data)
assert jnp.isfinite(grad) and jnp.isfinite(log_prob)
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_dist_pytree(jax_dist, sp_dist, params):
def f(x):
return jax_dist(*params)
if jax_dist is _ImproperWrapper:
pytest.skip("Cannot flattening ImproperUniform")
if jax_dist is dist.EulerMaruyama:
pytest.skip("EulerMaruyama doesn't define flatten/unflatten")
jax.jit(f)(0) # this test for flatten/unflatten
lax.map(f, np.ones(3)) # this test for compatibility w.r.t. scan
# Test that parameters do not change after flattening.
expected_dist = f(0)
actual_dist = jax.jit(f)(0)
expected_sample = expected_dist.sample(random.PRNGKey(0))
actual_sample = actual_dist.sample(random.PRNGKey(0))
expected_log_prob = expected_dist.log_prob(expected_sample)
actual_log_prob = actual_dist.log_prob(actual_sample)
assert_allclose(actual_sample, expected_sample, rtol=1e-6)
assert_allclose(actual_log_prob, expected_log_prob, rtol=2e-6)
@pytest.mark.parametrize(
"method, arg", [("to_event", 1), ("mask", False), ("expand", [5])]
)
def test_special_dist_pytree(method, arg):
def f(x):
d = dist.Normal(np.zeros(1), np.ones(1))
return getattr(d, method)(arg)
jax.jit(f)(0)
lax.map(f, np.ones(3))
def test_expand_no_unnecessary_batch_shape_expansion():
# ExpandedDistribution can mutate the `batch_shape` of
# its base distribution in order to make ExpandedDistribution
# mappable, see #684. However, this mutation should not take
# place if no mapping operation is performed.
for arg in (jnp.array(1.0), jnp.ones((2,)), jnp.ones((2, 2))):
        # Low level test: ensure that (tree_unflatten o tree_flatten)(expanded_dist)
        # amounts to an identity operation.
d = dist.Normal(arg, arg).expand([10, 3, *arg.shape])
roundtripped_d = type(d).tree_unflatten(*d.tree_flatten()[::-1])
assert d.batch_shape == roundtripped_d.batch_shape
assert d.base_dist.batch_shape == roundtripped_d.base_dist.batch_shape
assert d.base_dist.event_shape == roundtripped_d.base_dist.event_shape
assert jnp.allclose(d.base_dist.loc, roundtripped_d.base_dist.loc)
assert jnp.allclose(d.base_dist.scale, roundtripped_d.base_dist.scale)
# High-level test: `jax.jit`ting a function returning an ExpandedDistribution
# (which involves an instance of the low-level case as it will transform
# the original function by adding some flattening and unflattening steps)
# should return same object as its non-jitted equivalent.
def bs(arg):
return dist.Normal(arg, arg).expand([10, 3, *arg.shape])
d = bs(arg)
dj = jax.jit(bs)(arg)
assert isinstance(d, dist.ExpandedDistribution)
assert isinstance(dj, dist.ExpandedDistribution)
assert d.batch_shape == dj.batch_shape
assert d.base_dist.batch_shape == dj.base_dist.batch_shape
assert d.base_dist.event_shape == dj.base_dist.event_shape
assert jnp.allclose(d.base_dist.loc, dj.base_dist.loc)
assert jnp.allclose(d.base_dist.scale, dj.base_dist.scale)
@pytest.mark.parametrize("batch_shape", [(), (4,), (2, 3)], ids=str)
def test_kl_delta_normal_shape(batch_shape):
v = np.random.normal(size=batch_shape)
loc = np.random.normal(size=batch_shape)
scale = np.exp(np.random.normal(size=batch_shape))
p = dist.Delta(v)
q = dist.Normal(loc, scale)
assert kl_divergence(p, q).shape == batch_shape
def test_kl_delta_normal():
v = np.random.normal()
loc = np.random.normal()
scale = np.exp(np.random.normal())
p = dist.Delta(v, 10.0)
q = dist.Normal(loc, scale)
assert_allclose(kl_divergence(p, q), 10.0 - q.log_prob(v))
@pytest.mark.parametrize("batch_shape", [(), (4,), (2, 3)], ids=str)
@pytest.mark.parametrize("event_shape", [(), (4,), (2, 3)], ids=str)
def test_kl_independent_normal(batch_shape, event_shape):
shape = batch_shape + event_shape
p = dist.Normal(np.random.normal(size=shape), np.exp(np.random.normal(size=shape)))
q = dist.Normal(np.random.normal(size=shape), np.exp(np.random.normal(size=shape)))
actual = kl_divergence(
dist.Independent(p, len(event_shape)), dist.Independent(q, len(event_shape))
)
expected = sum_rightmost(kl_divergence(p, q), len(event_shape))
assert_allclose(actual, expected)
@pytest.mark.parametrize("batch_shape", [(), (4,), (2, 3)], ids=str)
@pytest.mark.parametrize("event_shape", [(), (4,), (2, 3)], ids=str)
def test_kl_expanded_normal(batch_shape, event_shape):
shape = batch_shape + event_shape
p = dist.Normal(np.random.normal(), np.exp(np.random.normal())).expand(shape)
q = dist.Normal(np.random.normal(), np.exp(np.random.normal())).expand(shape)
actual = kl_divergence(
dist.Independent(p, len(event_shape)), dist.Independent(q, len(event_shape))
)
expected = sum_rightmost(kl_divergence(p, q), len(event_shape))
assert_allclose(actual, expected)
@pytest.mark.parametrize("shape", [(), (4,), (2, 3)], ids=str)
@pytest.mark.parametrize(
"p_dist, q_dist",
[
(dist.Beta, dist.Beta),
(dist.Gamma, dist.Gamma),
(dist.Kumaraswamy, dist.Beta),
(dist.Normal, dist.Normal),
(dist.Weibull, dist.Gamma),
],
)
def test_kl_univariate(shape, p_dist, q_dist):
def make_dist(dist_class):
params = {}
for k, c in dist_class.arg_constraints.items():
if c is constraints.real:
params[k] = np.random.normal(size=shape)
elif c is constraints.positive:
params[k] = np.exp(np.random.normal(size=shape))
else:
raise ValueError(f"Missing pattern for param {k}.")
d = dist_class(**params)
if dist_class is dist.Kumaraswamy:
d.KL_KUMARASWAMY_BETA_TAYLOR_ORDER = 1000
return d
p = make_dist(p_dist)
q = make_dist(q_dist)
actual = kl_divergence(p, q)
x = p.sample(random.PRNGKey(0), (10000,)).copy()
expected = jnp.mean((p.log_prob(x) - q.log_prob(x)), 0)
assert_allclose(actual, expected, rtol=0.05)
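# A minimal sketch (numpy/scipy only) of the Monte Carlo identity used above,
#   KL(p || q) = E_p[log p(x) - log q(x)],
# checked against the closed form for two univariate normals,
#   KL = log(s_q / s_p) + (s_p^2 + (m_p - m_q)^2) / (2 * s_q^2) - 1/2.
import numpy as _np
from scipy.stats import norm as _norm
_p_sk, _q_sk = _norm(0.0, 1.0), _norm(1.0, 2.0)
_draws = _p_sk.rvs(size=200_000, random_state=0)
_mc_kl = _np.mean(_p_sk.logpdf(_draws) - _q_sk.logpdf(_draws))
_closed_kl = _np.log(2.0) + (1.0 + 1.0) / (2 * 4.0) - 0.5
assert _np.allclose(_mc_kl, _closed_kl, rtol=0.05)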
@pytest.mark.parametrize("shape", [(4,), (2, 3)], ids=str)
def test_kl_dirichlet_dirichlet(shape):
p = dist.Dirichlet(np.exp(np.random.normal(size=shape)))
q = dist.Dirichlet(np.exp(np.random.normal(size=shape)))
actual = kl_divergence(p, q)
x = p.sample(random.PRNGKey(0), (10_000,)).copy()
expected = jnp.mean((p.log_prob(x) - q.log_prob(x)), 0)
assert_allclose(actual, expected, rtol=0.05)
def test_vmapped_binomial_p0():
# test that vmapped binomial with p = 0 does not have an infinite loop
def sample_binomial_withp0(key):
n = 2 * (random.uniform(key) > 0.5)
_, key = random.split(key)
return dist.Binomial(total_count=n, probs=0).sample(key)
jax.vmap(sample_binomial_withp0)(random.split(random.PRNGKey(0), 1))
def _get_vmappable_dist_init_params(jax_dist):
if jax_dist.__name__ == ("_TruncatedCauchy"):
return [2, 3]
elif jax_dist.__name__ == ("_TruncatedNormal"):
return [2, 3]
elif issubclass(jax_dist, dist.Distribution):
init_parameters = list(inspect.signature(jax_dist.__init__).parameters.keys())[
1:
]
vmap_over_parameters = list(
inspect.signature(vmap_over.dispatch(jax_dist)).parameters.keys()
)[1:]
        return [
            i
            for i, name in enumerate(init_parameters)
            if name in vmap_over_parameters
        ]
else:
raise ValueError
def _allclose_or_equal(a1, a2):
if isinstance(a1, np.ndarray):
return np.allclose(a2, a1)
elif isinstance(a1, jnp.ndarray):
return jnp.allclose(a2, a1)
elif isinstance(a1, csr_matrix):
return np.allclose(a2.todense(), a1.todense())
else:
return a2 == a1 or a2 is a1
def _tree_equal(t1, t2):
t = jax.tree_util.tree_map(_allclose_or_equal, t1, t2)
return jnp.all(jax.flatten_util.ravel_pytree(t)[0])
@pytest.mark.parametrize(
"jax_dist, sp_dist, params", CONTINUOUS + DISCRETE + DIRECTIONAL
)
def test_vmap_dist(jax_dist, sp_dist, params):
param_names = list(inspect.signature(jax_dist).parameters.keys())
vmappable_param_idxs = _get_vmappable_dist_init_params(jax_dist)
vmappable_param_idxs = vmappable_param_idxs[: len(params)]
if len(vmappable_param_idxs) == 0:
return
def make_jax_dist(*params):
return jax_dist(*params)
def sample(d: dist.Distribution):
return d.sample(random.PRNGKey(0))
d = make_jax_dist(*params)
if isinstance(d, _SparseCAR) and d.is_sparse:
# In this case, since csr arrays are not jittable,
# _SparseCAR has a csr_matrix as part of its pytree
# definition (not as a pytree leaf). This causes pytree
# operations like tree_map to fail, since these functions
# compare the pytree def of each of the arguments using ==
# which is ambiguous for array-like objects.
return
in_out_axes_cases = [
# vmap over all args
(
tuple(0 if i in vmappable_param_idxs else None for i in range(len(params))),
0,
),
# vmap over a single arg, out over all attributes of a distribution
*(
([0 if i == idx else None for i in range(len(params))], 0)
for idx in vmappable_param_idxs
if params[idx] is not None
),
# vmap over a single arg, out over the associated attribute of the distribution
*(
(
[0 if i == idx else None for i in range(len(params))],
vmap_over(d, **{param_names[idx]: 0}),
)
for idx in vmappable_param_idxs
if params[idx] is not None
),
# vmap over a single arg, axis=1, (out single attribute, axis=1)
*(
(
[1 if i == idx else None for i in range(len(params))],
vmap_over(d, **{param_names[idx]: 1}),
)
for idx in vmappable_param_idxs
if isinstance(params[idx], jnp.ndarray) and jnp.array(params[idx]).ndim > 0
            # skip this distribution because _GeneralMixture.__init__ turns
            # 1d inputs into 0d attributes, which breaks the expectations of
            # the vmapping test case where in_axes=1 (only used for rank >= 1 tensors)
and jax_dist is not _GeneralMixture
),
]
for in_axes, out_axes in in_out_axes_cases:
batched_params = [
jax.tree_map(lambda x: jnp.expand_dims(x, ax), arg)
if isinstance(ax, int)
else arg
for arg, ax in zip(params, in_axes)
]
# Recreate the jax_dist to avoid side effects coming from `d.sample`
# triggering lazy_property computations, which, in a few cases, break
# vmap_over's expectations regarding existing attributes to be vmapped.
d = make_jax_dist(*params)
batched_d = jax.vmap(make_jax_dist, in_axes=in_axes, out_axes=out_axes)(
*batched_params
)
eq = vmap(lambda x, y: _tree_equal(x, y), in_axes=(out_axes, None))(
batched_d, d
)
assert eq == jnp.array([True])
samples_dist = sample(d)
samples_batched_dist = jax.vmap(sample, in_axes=(out_axes,))(batched_d)
assert samples_batched_dist.shape == (1, *samples_dist.shape)
def test_multinomial_abstract_total_count():
probs = jnp.array([0.2, 0.5, 0.3])
key = random.PRNGKey(0)
def f(x):
total_count = x.sum(-1)
return dist.Multinomial(total_count, probs=probs, total_count_max=10).sample(
key
)
x = dist.Multinomial(10, probs).sample(key)
y = jax.jit(f)(x)
assert_allclose(x, y, rtol=1e-6)
def test_normal_log_cdf():
# test if log_cdf method agrees with jax.scipy.stats.norm.logcdf
# and if exp(log_cdf) agrees with cdf
loc = jnp.array([[0.0, -10.0, 20.0]])
scale = jnp.array([[1, 5, 7]])
values = jnp.linspace(-5, 5, 100).reshape(-1, 1)
numpyro_log_cdf = dist.Normal(loc=loc, scale=scale).log_cdf(values)
numpyro_cdf = dist.Normal(loc=loc, scale=scale).cdf(values)
jax_log_cdf = jax_norm.logcdf(loc=loc, scale=scale, x=values)
assert_allclose(numpyro_log_cdf, jax_log_cdf)
assert_allclose(jnp.exp(numpyro_log_cdf), numpyro_cdf, rtol=1e-6)
@pytest.mark.parametrize(
"value",
[
-15.0,
jnp.array([[-15.0], [-10.0], [-5.0]]),
jnp.array([[[-15.0], [-10.0], [-5.0]], [[-14.0], [-9.0], [-4.0]]]),
],
)
def test_truncated_normal_log_prob_in_tail(value):
# define set of distributions truncated in tail of distribution
loc = 1.35
scale = jnp.geomspace(0.01, 1, 10)
low, high = (-20, -1.0)
a, b = (low - loc) / scale, (high - loc) / scale # rescale for jax input
numpyro_log_prob = dist.TruncatedNormal(loc, scale, low=low, high=high).log_prob(
value
)
jax_log_prob = jax_truncnorm.logpdf(value, loc=loc, scale=scale, a=a, b=b)
assert_allclose(numpyro_log_prob, jax_log_prob, rtol=1e-06)
def test_sample_truncated_normal_in_tail():
    # test that samples from distributions truncated in
    # the tail of the distribution do not contain any infs
tail_dist = dist.TruncatedNormal(loc=0, scale=1, low=-16, high=-15)
samples = tail_dist.sample(random.PRNGKey(0), sample_shape=(10_000,))
assert ~jnp.isinf(samples).any()
@jax.enable_custom_prng()
def test_jax_custom_prng():
samples = dist.Normal(0, 5).sample(random.PRNGKey(0), sample_shape=(1000,))
assert ~jnp.isinf(samples).any()
# -*- coding: utf-8 -*-
"""
ORIGINAL PROGRAM SOURCE CODE:
1: from __future__ import division, print_function, absolute_import
2:
3: import os
4: from os.path import join
5:
6: from scipy._build_utils import numpy_nodepr_api
7:
8:
9: def configuration(parent_package='',top_path=None):
10: from numpy.distutils.misc_util import Configuration
11: from numpy.distutils.system_info import get_info
12: config = Configuration('integrate', parent_package, top_path)
13:
14: # Get a local copy of lapack_opt_info
15: lapack_opt = dict(get_info('lapack_opt',notfound_action=2))
16: # Pop off the libraries list so it can be combined with
17: # additional required libraries
18: lapack_libs = lapack_opt.pop('libraries', [])
19:
20: mach_src = [join('mach','*.f')]
21: quadpack_src = [join('quadpack', '*.f')]
22: lsoda_src = [join('odepack', fn) for fn in [
23: 'blkdta000.f', 'bnorm.f', 'cfode.f',
24: 'ewset.f', 'fnorm.f', 'intdy.f',
25: 'lsoda.f', 'prja.f', 'solsy.f', 'srcma.f',
26: 'stoda.f', 'vmnorm.f', 'xerrwv.f', 'xsetf.f',
27: 'xsetun.f']]
28: vode_src = [join('odepack', 'vode.f'), join('odepack', 'zvode.f')]
29: dop_src = [join('dop','*.f')]
30: quadpack_test_src = [join('tests','_test_multivariate.c')]
31: odeint_banded_test_src = [join('tests', 'banded5x5.f')]
32:
33: config.add_library('mach', sources=mach_src,
34: config_fc={'noopt':(__file__,1)})
35: config.add_library('quadpack', sources=quadpack_src)
36: config.add_library('lsoda', sources=lsoda_src)
37: config.add_library('vode', sources=vode_src)
38: config.add_library('dop', sources=dop_src)
39:
40: # Extensions
41: # quadpack:
42: include_dirs = [join(os.path.dirname(__file__), '..', '_lib', 'src')]
43: if 'include_dirs' in lapack_opt:
44: lapack_opt = dict(lapack_opt)
45: include_dirs.extend(lapack_opt.pop('include_dirs'))
46:
47: config.add_extension('_quadpack',
48: sources=['_quadpackmodule.c'],
49: libraries=['quadpack', 'mach'] + lapack_libs,
50: depends=(['__quadpack.h']
51: + quadpack_src + mach_src),
52: include_dirs=include_dirs,
53: **lapack_opt)
54:
55: # odepack/lsoda-odeint
56: odepack_opts = lapack_opt.copy()
57: odepack_opts.update(numpy_nodepr_api)
58: config.add_extension('_odepack',
59: sources=['_odepackmodule.c'],
60: libraries=['lsoda', 'mach'] + lapack_libs,
61: depends=(lsoda_src + mach_src),
62: **odepack_opts)
63:
64: # vode
65: config.add_extension('vode',
66: sources=['vode.pyf'],
67: libraries=['vode'] + lapack_libs,
68: depends=vode_src,
69: **lapack_opt)
70:
71: # lsoda
72: config.add_extension('lsoda',
73: sources=['lsoda.pyf'],
74: libraries=['lsoda', 'mach'] + lapack_libs,
75: depends=(lsoda_src + mach_src),
76: **lapack_opt)
77:
78: # dop
79: config.add_extension('_dop',
80: sources=['dop.pyf'],
81: libraries=['dop'],
82: depends=dop_src)
83:
84: config.add_extension('_test_multivariate',
85: sources=quadpack_test_src)
86:
87: # Fortran+f2py extension module for testing odeint.
88: config.add_extension('_test_odeint_banded',
89: sources=odeint_banded_test_src,
90: libraries=['lsoda', 'mach'] + lapack_libs,
91: depends=(lsoda_src + mach_src),
92: **lapack_opt)
93:
94: config.add_subpackage('_ivp')
95:
96: config.add_data_dir('tests')
97: return config
98:
99:
100: if __name__ == '__main__':
101: from numpy.distutils.core import setup
102: setup(**configuration(top_path='').todict())
103:
"""
# Import the stypy library necessary elements
from stypy.type_inference_programs.type_inference_programs_imports import *
# Create the module type store
module_type_store = Context(None, __file__)
# ################# Begin of the type inference program ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 3, 0))
# 'import os' statement (line 3)
import os
import_module(stypy.reporting.localization.Localization(__file__, 3, 0), 'os', os, module_type_store)
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 4, 0))
# 'from os.path import join' statement (line 4)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32066 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path')
if (type(import_32066) is not StypyTypeError):
if (import_32066 != 'pyd_module'):
__import__(import_32066)
sys_modules_32067 = sys.modules[import_32066]
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', sys_modules_32067.module_type_store, module_type_store, ['join'])
nest_module(stypy.reporting.localization.Localization(__file__, 4, 0), __file__, sys_modules_32067, sys_modules_32067.module_type_store, module_type_store)
else:
from os.path import join
import_from_module(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', None, module_type_store, ['join'], [join])
else:
# Assigning a type to the variable 'os.path' (line 4)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 4, 0), 'os.path', import_32066)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 6, 0))
# 'from scipy._build_utils import numpy_nodepr_api' statement (line 6)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32068 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils')
if (type(import_32068) is not StypyTypeError):
if (import_32068 != 'pyd_module'):
__import__(import_32068)
sys_modules_32069 = sys.modules[import_32068]
import_from_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', sys_modules_32069.module_type_store, module_type_store, ['numpy_nodepr_api'])
nest_module(stypy.reporting.localization.Localization(__file__, 6, 0), __file__, sys_modules_32069, sys_modules_32069.module_type_store, module_type_store)
else:
from scipy._build_utils import numpy_nodepr_api
import_from_module(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', None, module_type_store, ['numpy_nodepr_api'], [numpy_nodepr_api])
else:
# Assigning a type to the variable 'scipy._build_utils' (line 6)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 6, 0), 'scipy._build_utils', import_32068)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
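# Both module-level import blocks above follow one guarded pattern: ask the
# generator for a type-inference equivalent of the module; if that succeeds,
# import it and copy the requested names into this module's type store; if it
# fails, bind the module name to the error type so later lookups produce a
# localized diagnostic instead of a crash. A condensed sketch of the control
# flow (`resolve`, `bind`, and `is_error` are hypothetical stand-ins):
#
#     result = resolve('scipy._build_utils')   # may yield an error type
#     if not is_error(result):
#         bind('numpy_nodepr_api', result)     # normal import path
#     else:
#         bind('scipy._build_utils', result)   # remember the failure itself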
@norecursion
def configuration(localization, *varargs, **kwargs):
global module_type_store
# Assign values to the parameters with defaults
str_32070 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 9, 33), 'str', '')
# Getting the type of 'None' (line 9)
None_32071 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 45), 'None')
defaults = [str_32070, None_32071]
# Create a new context for function 'configuration'
module_type_store = module_type_store.open_function_context('configuration', 9, 0, False)
# Passed parameters checking function
configuration.stypy_localization = localization
configuration.stypy_type_of_self = None
configuration.stypy_type_store = module_type_store
configuration.stypy_function_name = 'configuration'
configuration.stypy_param_names_list = ['parent_package', 'top_path']
configuration.stypy_varargs_param_name = None
configuration.stypy_kwargs_param_name = None
configuration.stypy_call_defaults = defaults
configuration.stypy_call_varargs = varargs
configuration.stypy_call_kwargs = kwargs
arguments = process_argument_values(localization, None, module_type_store, 'configuration', ['parent_package', 'top_path'], None, None, defaults, varargs, kwargs)
if is_error_type(arguments):
# Destroy the current context
module_type_store = module_type_store.close_function_context()
return arguments
# Initialize method data
init_call_information(module_type_store, 'configuration', localization, ['parent_package', 'top_path'], arguments)
# Default return type storage variable (SSA)
# Assigning a type to the variable 'stypy_return_type'
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 0, 0), 'stypy_return_type', None)
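# The preamble above mirrors a call frame for type inference: parameter
# defaults are materialized as types, a fresh function context is opened on
# the store, and process_argument_values(...) checks the incoming arguments
# against the declared parameter list. If that check yields an error type, the
# context is torn down and the error is returned in place of a result, so one
# bad call site cannot poison the rest of the analysis. A sketch of the
# short-circuit (hypothetical helper names, same dict-backed store as above):
#
#     args = check_arguments(['parent_package', 'top_path'], call_args)
#     if is_error(args):
#         close_context()
#         return args    # propagate the error type to the caller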
# ################# Begin of 'configuration(...)' code ##################
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 10, 4))
# 'from numpy.distutils.misc_util import Configuration' statement (line 10)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32072 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util')
if (type(import_32072) is not StypyTypeError):
if (import_32072 != 'pyd_module'):
__import__(import_32072)
sys_modules_32073 = sys.modules[import_32072]
import_from_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', sys_modules_32073.module_type_store, module_type_store, ['Configuration'])
nest_module(stypy.reporting.localization.Localization(__file__, 10, 4), __file__, sys_modules_32073, sys_modules_32073.module_type_store, module_type_store)
else:
from numpy.distutils.misc_util import Configuration
import_from_module(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', None, module_type_store, ['Configuration'], [Configuration])
else:
# Assigning a type to the variable 'numpy.distutils.misc_util' (line 10)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 10, 4), 'numpy.distutils.misc_util', import_32072)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 11, 4))
# 'from numpy.distutils.system_info import get_info' statement (line 11)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32074 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info')
if (type(import_32074) is not StypyTypeError):
if (import_32074 != 'pyd_module'):
__import__(import_32074)
sys_modules_32075 = sys.modules[import_32074]
import_from_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', sys_modules_32075.module_type_store, module_type_store, ['get_info'])
nest_module(stypy.reporting.localization.Localization(__file__, 11, 4), __file__, sys_modules_32075, sys_modules_32075.module_type_store, module_type_store)
else:
from numpy.distutils.system_info import get_info
import_from_module(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', None, module_type_store, ['get_info'], [get_info])
else:
# Assigning a type to the variable 'numpy.distutils.system_info' (line 11)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 11, 4), 'numpy.distutils.system_info', import_32074)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
# Assigning a Call to a Name (line 12):
# Call to Configuration(...): (line 12)
# Processing the call arguments (line 12)
str_32077 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 12, 27), 'str', 'integrate')
# Getting the type of 'parent_package' (line 12)
parent_package_32078 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 40), 'parent_package', False)
# Getting the type of 'top_path' (line 12)
top_path_32079 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 56), 'top_path', False)
# Processing the call keyword arguments (line 12)
kwargs_32080 = {}
# Getting the type of 'Configuration' (line 12)
Configuration_32076 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 12, 13), 'Configuration', False)
# Calling Configuration(args, kwargs) (line 12)
Configuration_call_result_32081 = invoke(stypy.reporting.localization.Localization(__file__, 12, 13), Configuration_32076, *[str_32077, parent_package_32078, top_path_32079], **kwargs_32080)
# Assigning a type to the variable 'config' (line 12)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 12, 4), 'config', Configuration_call_result_32081)
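# Every call site in the generated program is funneled through invoke(...),
# which receives the localization, the callee's inferred type, and the
# argument types, and returns the inferred result type (here, the
# Configuration instance bound to 'config'). A guess at the contract, modeled
# only on the usage visible in this file (not the stypy source):
#
#     def invoke(loc, callee, *arg_types, **kw_types):
#         # Flag an error tied to `loc` if the call is ill-typed; otherwise
#         # return the type the call evaluates to.
#         ...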
# Assigning a Call to a Name (line 15):
# Call to dict(...): (line 15)
# Processing the call arguments (line 15)
# Call to get_info(...): (line 15)
# Processing the call arguments (line 15)
str_32084 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 15, 31), 'str', 'lapack_opt')
# Processing the call keyword arguments (line 15)
int_32085 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 15, 60), 'int')
keyword_32086 = int_32085
kwargs_32087 = {'notfound_action': keyword_32086}
# Getting the type of 'get_info' (line 15)
get_info_32083 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 15, 22), 'get_info', False)
# Calling get_info(args, kwargs) (line 15)
get_info_call_result_32088 = invoke(stypy.reporting.localization.Localization(__file__, 15, 22), get_info_32083, *[str_32084], **kwargs_32087)
# Processing the call keyword arguments (line 15)
kwargs_32089 = {}
# Getting the type of 'dict' (line 15)
dict_32082 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 15, 17), 'dict', False)
# Calling dict(args, kwargs) (line 15)
dict_call_result_32090 = invoke(stypy.reporting.localization.Localization(__file__, 15, 17), dict_32082, *[get_info_call_result_32088], **kwargs_32089)
# Assigning a type to the variable 'lapack_opt' (line 15)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 15, 4), 'lapack_opt', dict_call_result_32090)
# Assigning a Call to a Name (line 18):
# Call to pop(...): (line 18)
# Processing the call arguments (line 18)
str_32093 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 33), 'str', 'libraries')
# Obtaining an instance of the builtin type 'list' (line 18)
list_32094 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 18, 46), 'list')
# Adding type elements to the builtin type 'list' instance (line 18)
# Processing the call keyword arguments (line 18)
kwargs_32095 = {}
# Getting the type of 'lapack_opt' (line 18)
lapack_opt_32091 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 18, 18), 'lapack_opt', False)
# Obtaining the member 'pop' of a type (line 18)
pop_32092 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 18, 18), lapack_opt_32091, 'pop')
# Calling pop(args, kwargs) (line 18)
pop_call_result_32096 = invoke(stypy.reporting.localization.Localization(__file__, 18, 18), pop_32092, *[str_32093, list_32094], **kwargs_32095)
# Assigning a type to the variable 'lapack_libs' (line 18)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 18, 4), 'lapack_libs', pop_call_result_32096)
# Assigning a List to a Name (line 20):
# Obtaining an instance of the builtin type 'list' (line 20)
list_32097 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 15), 'list')
# Adding type elements to the builtin type 'list' instance (line 20)
# Adding element type (line 20)
# Call to join(...): (line 20)
# Processing the call arguments (line 20)
str_32099 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 21), 'str', 'mach')
str_32100 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 20, 28), 'str', '*.f')
# Processing the call keyword arguments (line 20)
kwargs_32101 = {}
# Getting the type of 'join' (line 20)
join_32098 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 20, 16), 'join', False)
# Calling join(args, kwargs) (line 20)
join_call_result_32102 = invoke(stypy.reporting.localization.Localization(__file__, 20, 16), join_32098, *[str_32099, str_32100], **kwargs_32101)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 20, 15), list_32097, join_call_result_32102)
# Assigning a type to the variable 'mach_src' (line 20)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 20, 4), 'mach_src', list_32097)
# Assigning a List to a Name (line 21):
# Obtaining an instance of the builtin type 'list' (line 21)
list_32103 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 19), 'list')
# Adding type elements to the builtin type 'list' instance (line 21)
# Adding element type (line 21)
# Call to join(...): (line 21)
# Processing the call arguments (line 21)
str_32105 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 25), 'str', 'quadpack')
str_32106 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 21, 37), 'str', '*.f')
# Processing the call keyword arguments (line 21)
kwargs_32107 = {}
# Getting the type of 'join' (line 21)
join_32104 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 21, 20), 'join', False)
# Calling join(args, kwargs) (line 21)
join_call_result_32108 = invoke(stypy.reporting.localization.Localization(__file__, 21, 20), join_32104, *[str_32105, str_32106], **kwargs_32107)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 21, 19), list_32103, join_call_result_32108)
# Assigning a type to the variable 'quadpack_src' (line 21)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 21, 4), 'quadpack_src', list_32103)
# Assigning a ListComp to a Name (line 22):
# Calculating list comprehension
# Calculating comprehension expression
# Obtaining an instance of the builtin type 'list' (line 22)
list_32114 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 47), 'list')
# Adding type elements to the builtin type 'list' instance (line 22)
# Adding element type (line 22)
str_32115 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 8), 'str', 'blkdta000.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32115)
# Adding element type (line 22)
str_32116 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 23), 'str', 'bnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32116)
# Adding element type (line 22)
str_32117 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 23, 34), 'str', 'cfode.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32117)
# Adding element type (line 22)
str_32118 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 8), 'str', 'ewset.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32118)
# Adding element type (line 22)
str_32119 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 19), 'str', 'fnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32119)
# Adding element type (line 22)
str_32120 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 24, 30), 'str', 'intdy.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32120)
# Adding element type (line 22)
str_32121 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 8), 'str', 'lsoda.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32121)
# Adding element type (line 22)
str_32122 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 19), 'str', 'prja.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32122)
# Adding element type (line 22)
str_32123 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 29), 'str', 'solsy.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32123)
# Adding element type (line 22)
str_32124 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 25, 40), 'str', 'srcma.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32124)
# Adding element type (line 22)
str_32125 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 8), 'str', 'stoda.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32125)
# Adding element type (line 22)
str_32126 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 19), 'str', 'vmnorm.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32126)
# Adding element type (line 22)
str_32127 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 31), 'str', 'xerrwv.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32127)
# Adding element type (line 22)
str_32128 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 26, 43), 'str', 'xsetf.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32128)
# Adding element type (line 22)
str_32129 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 27, 8), 'str', 'xsetun.f')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 47), list_32114, str_32129)
comprehension_32130 = get_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 17), list_32114)
# Assigning a type to the variable 'fn' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 17), 'fn', comprehension_32130)
# Call to join(...): (line 22)
# Processing the call arguments (line 22)
str_32110 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 22), 'str', 'odepack')
# Getting the type of 'fn' (line 22)
fn_32111 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 33), 'fn', False)
# Processing the call keyword arguments (line 22)
kwargs_32112 = {}
# Getting the type of 'join' (line 22)
join_32109 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 22, 17), 'join', False)
# Calling join(args, kwargs) (line 22)
join_call_result_32113 = invoke(stypy.reporting.localization.Localization(__file__, 22, 17), join_32109, *[str_32110, fn_32111], **kwargs_32112)
list_32131 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 22, 17), 'list')
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 22, 17), list_32131, join_call_result_32113)
# Assigning a type to the variable 'lsoda_src' (line 22)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 22, 4), 'lsoda_src', list_32131)
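# Note how the list comprehension from line 22 of the original source is
# evaluated at the type level: the element type of the literal list (str) is
# bound to the loop variable 'fn' once, join(...) is invoked a single time
# against that type, and the result becomes the contained element type of
# 'lsoda_src'. One symbolic pass stands in for all fifteen concrete
# iterations. A rough analogue (illustrative only):
#
#     from os.path import join
#     elem_type = str                            # type of every list element
#     result_type = type(join('odepack', 'f'))   # str: one representative call
#     # 'lsoda_src' is then a list whose contained element type is result_type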
# Assigning a List to a Name (line 28):
# Obtaining an instance of the builtin type 'list' (line 28)
list_32132 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 15), 'list')
# Adding type elements to the builtin type 'list' instance (line 28)
# Adding element type (line 28)
# Call to join(...): (line 28)
# Processing the call arguments (line 28)
str_32134 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 21), 'str', 'odepack')
str_32135 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 32), 'str', 'vode.f')
# Processing the call keyword arguments (line 28)
kwargs_32136 = {}
# Getting the type of 'join' (line 28)
join_32133 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 28, 16), 'join', False)
# Calling join(args, kwargs) (line 28)
join_call_result_32137 = invoke(stypy.reporting.localization.Localization(__file__, 28, 16), join_32133, *[str_32134, str_32135], **kwargs_32136)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 28, 15), list_32132, join_call_result_32137)
# Adding element type (line 28)
# Call to join(...): (line 28)
# Processing the call arguments (line 28)
str_32139 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 48), 'str', 'odepack')
str_32140 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 28, 59), 'str', 'zvode.f')
# Processing the call keyword arguments (line 28)
kwargs_32141 = {}
# Getting the type of 'join' (line 28)
join_32138 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 28, 43), 'join', False)
# Calling join(args, kwargs) (line 28)
join_call_result_32142 = invoke(stypy.reporting.localization.Localization(__file__, 28, 43), join_32138, *[str_32139, str_32140], **kwargs_32141)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 28, 15), list_32132, join_call_result_32142)
# Assigning a type to the variable 'vode_src' (line 28)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 28, 4), 'vode_src', list_32132)
# Assigning a List to a Name (line 29):
# Obtaining an instance of the builtin type 'list' (line 29)
list_32143 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 14), 'list')
# Adding type elements to the builtin type 'list' instance (line 29)
# Adding element type (line 29)
# Call to join(...): (line 29)
# Processing the call arguments (line 29)
str_32145 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 20), 'str', 'dop')
str_32146 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 29, 26), 'str', '*.f')
# Processing the call keyword arguments (line 29)
kwargs_32147 = {}
# Getting the type of 'join' (line 29)
join_32144 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 29, 15), 'join', False)
# Calling join(args, kwargs) (line 29)
join_call_result_32148 = invoke(stypy.reporting.localization.Localization(__file__, 29, 15), join_32144, *[str_32145, str_32146], **kwargs_32147)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 29, 14), list_32143, join_call_result_32148)
# Assigning a type to the variable 'dop_src' (line 29)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 29, 4), 'dop_src', list_32143)
# Assigning a List to a Name (line 30):
# Obtaining an instance of the builtin type 'list' (line 30)
list_32149 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 24), 'list')
# Adding type elements to the builtin type 'list' instance (line 30)
# Adding element type (line 30)
# Call to join(...): (line 30)
# Processing the call arguments (line 30)
str_32151 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 30), 'str', 'tests')
str_32152 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 30, 38), 'str', '_test_multivariate.c')
# Processing the call keyword arguments (line 30)
kwargs_32153 = {}
# Getting the type of 'join' (line 30)
join_32150 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 30, 25), 'join', False)
# Calling join(args, kwargs) (line 30)
join_call_result_32154 = invoke(stypy.reporting.localization.Localization(__file__, 30, 25), join_32150, *[str_32151, str_32152], **kwargs_32153)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 30, 24), list_32149, join_call_result_32154)
# Assigning a type to the variable 'quadpack_test_src' (line 30)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 30, 4), 'quadpack_test_src', list_32149)
# Assigning a List to a Name (line 31):
# Obtaining an instance of the builtin type 'list' (line 31)
list_32155 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 29), 'list')
# Adding type elements to the builtin type 'list' instance (line 31)
# Adding element type (line 31)
# Call to join(...): (line 31)
# Processing the call arguments (line 31)
str_32157 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 35), 'str', 'tests')
str_32158 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 31, 44), 'str', 'banded5x5.f')
# Processing the call keyword arguments (line 31)
kwargs_32159 = {}
# Getting the type of 'join' (line 31)
join_32156 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 31, 30), 'join', False)
# Calling join(args, kwargs) (line 31)
join_call_result_32160 = invoke(stypy.reporting.localization.Localization(__file__, 31, 30), join_32156, *[str_32157, str_32158], **kwargs_32159)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 31, 29), list_32155, join_call_result_32160)
# Assigning a type to the variable 'odeint_banded_test_src' (line 31)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 31, 4), 'odeint_banded_test_src', list_32155)
# Call to add_library(...): (line 33)
# Processing the call arguments (line 33)
str_32163 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 33, 23), 'str', 'mach')
# Processing the call keyword arguments (line 33)
# Getting the type of 'mach_src' (line 33)
mach_src_32164 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 33, 39), 'mach_src', False)
keyword_32165 = mach_src_32164
# Obtaining an instance of the builtin type 'dict' (line 34)
dict_32166 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 33), 'dict')
# Adding type elements to the builtin type 'dict' instance (line 34)
# Adding element type (key, value) (line 34)
str_32167 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 34), 'str', 'noopt')
# Obtaining an instance of the builtin type 'tuple' (line 34)
tuple_32168 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 43), 'tuple')
# Adding type elements to the builtin type 'tuple' instance (line 34)
# Adding element type (line 34)
# Getting the type of '__file__' (line 34)
file___32169 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 34, 43), '__file__', False)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 43), tuple_32168, file___32169)
# Adding element type (line 34)
int_32170 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 34, 52), 'int')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 43), tuple_32168, int_32170)
set_contained_elements_type(stypy.reporting.localization.Localization(__file__, 34, 33), dict_32166, (str_32167, tuple_32168))
keyword_32171 = dict_32166
kwargs_32172 = {'sources': keyword_32165, 'config_fc': keyword_32171}
# Getting the type of 'config' (line 33)
config_32161 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 33, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 33)
add_library_32162 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 33, 4), config_32161, 'add_library')
# Calling add_library(args, kwargs) (line 33)
add_library_call_result_32173 = invoke(stypy.reporting.localization.Localization(__file__, 33, 4), add_library_32162, *[str_32163], **kwargs_32172)
# Call to add_library(...): (line 35)
# Processing the call arguments (line 35)
str_32176 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 35, 23), 'str', 'quadpack')
# Processing the call keyword arguments (line 35)
# Getting the type of 'quadpack_src' (line 35)
quadpack_src_32177 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 43), 'quadpack_src', False)
keyword_32178 = quadpack_src_32177
kwargs_32179 = {'sources': keyword_32178}
# Getting the type of 'config' (line 35)
config_32174 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 35, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 35)
add_library_32175 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 35, 4), config_32174, 'add_library')
# Calling add_library(args, kwargs) (line 35)
add_library_call_result_32180 = invoke(stypy.reporting.localization.Localization(__file__, 35, 4), add_library_32175, *[str_32176], **kwargs_32179)
# Call to add_library(...): (line 36)
# Processing the call arguments (line 36)
str_32183 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 36, 23), 'str', 'lsoda')
# Processing the call keyword arguments (line 36)
# Getting the type of 'lsoda_src' (line 36)
lsoda_src_32184 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 40), 'lsoda_src', False)
keyword_32185 = lsoda_src_32184
kwargs_32186 = {'sources': keyword_32185}
# Getting the type of 'config' (line 36)
config_32181 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 36, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 36)
add_library_32182 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 36, 4), config_32181, 'add_library')
# Calling add_library(args, kwargs) (line 36)
add_library_call_result_32187 = invoke(stypy.reporting.localization.Localization(__file__, 36, 4), add_library_32182, *[str_32183], **kwargs_32186)
# Call to add_library(...): (line 37)
# Processing the call arguments (line 37)
str_32190 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 37, 23), 'str', 'vode')
# Processing the call keyword arguments (line 37)
# Getting the type of 'vode_src' (line 37)
vode_src_32191 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 39), 'vode_src', False)
keyword_32192 = vode_src_32191
kwargs_32193 = {'sources': keyword_32192}
# Getting the type of 'config' (line 37)
config_32188 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 37, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 37)
add_library_32189 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 37, 4), config_32188, 'add_library')
# Calling add_library(args, kwargs) (line 37)
add_library_call_result_32194 = invoke(stypy.reporting.localization.Localization(__file__, 37, 4), add_library_32189, *[str_32190], **kwargs_32193)
# Call to add_library(...): (line 38)
# Processing the call arguments (line 38)
str_32197 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 38, 23), 'str', 'dop')
# Processing the call keyword arguments (line 38)
# Getting the type of 'dop_src' (line 38)
dop_src_32198 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 38), 'dop_src', False)
keyword_32199 = dop_src_32198
kwargs_32200 = {'sources': keyword_32199}
# Getting the type of 'config' (line 38)
config_32195 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 38, 4), 'config', False)
# Obtaining the member 'add_library' of a type (line 38)
add_library_32196 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 38, 4), config_32195, 'add_library')
# Calling add_library(args, kwargs) (line 38)
add_library_call_result_32201 = invoke(stypy.reporting.localization.Localization(__file__, 38, 4), add_library_32196, *[str_32197], **kwargs_32200)
# Assigning a List to a Name (line 42):
# Obtaining an instance of the builtin type 'list' (line 42)
list_32202 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 42, 19), 'list')
# Adding type elements to the builtin type 'list' instance (line 42)
# Adding element type (line 42)
# Call to join(...): (line 42)
# Processing the call arguments (line 42)
# Call to dirname(...): (line 42)
# Processing the call arguments (line 42)
# Getting the type of '__file__' (line 42)
file___32207 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 42, 41), '__file__', False)
# Processing the call keyword arguments (line 42)
kwargs_32208 = {}
# Getting the type of 'os' (line 42)
os_32204 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 42, 25), 'os', False)
# Obtaining the member 'path' of a type (line 42)
path_32205 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 42, 25), os_32204, 'path')
# Obtaining the member 'dirname' of a type (line 42)
dirname_32206 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 42, 25), path_32205, 'dirname')
# Calling dirname(args, kwargs) (line 42)
dirname_call_result_32209 = invoke(stypy.reporting.localization.Localization(__file__, 42, 25), dirname_32206, *[file___32207], **kwargs_32208)
str_32210 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 42, 52), 'str', '..')
str_32211 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 42, 58), 'str', '_lib')
str_32212 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 42, 66), 'str', 'src')
# Processing the call keyword arguments (line 42)
kwargs_32213 = {}
# Getting the type of 'join' (line 42)
join_32203 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 42, 20), 'join', False)
# Calling join(args, kwargs) (line 42)
join_call_result_32214 = invoke(stypy.reporting.localization.Localization(__file__, 42, 20), join_32203, *[dirname_call_result_32209, str_32210, str_32211, str_32212], **kwargs_32213)
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 42, 19), list_32202, join_call_result_32214)
# Assigning a type to the variable 'include_dirs' (line 42)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 42, 4), 'include_dirs', list_32202)
str_32215 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 43, 7), 'str', 'include_dirs')
# Getting the type of 'lapack_opt' (line 43)
lapack_opt_32216 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 43, 25), 'lapack_opt')
# Applying the binary operator 'in' (line 43)
result_contains_32217 = python_operator(stypy.reporting.localization.Localization(__file__, 43, 7), 'in', str_32215, lapack_opt_32216)
# Testing the type of an if condition (line 43)
if_condition_32218 = is_suitable_condition(stypy.reporting.localization.Localization(__file__, 43, 4), result_contains_32217)
# Assigning a type to the variable 'if_condition_32218' (line 43)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 43, 4), 'if_condition_32218', if_condition_32218)
# SSA begins for if statement (line 43)
module_type_store = SSAContext.create_ssa_context(module_type_store, 'if')
# Assigning a Call to a Name (line 44):
# Call to dict(...): (line 44)
# Processing the call arguments (line 44)
# Getting the type of 'lapack_opt' (line 44)
lapack_opt_32220 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 26), 'lapack_opt', False)
# Processing the call keyword arguments (line 44)
kwargs_32221 = {}
# Getting the type of 'dict' (line 44)
dict_32219 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 44, 21), 'dict', False)
# Calling dict(args, kwargs) (line 44)
dict_call_result_32222 = invoke(stypy.reporting.localization.Localization(__file__, 44, 21), dict_32219, *[lapack_opt_32220], **kwargs_32221)
# Assigning a type to the variable 'lapack_opt' (line 44)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 44, 8), 'lapack_opt', dict_call_result_32222)
# Call to extend(...): (line 45)
# Processing the call arguments (line 45)
# Call to pop(...): (line 45)
# Processing the call arguments (line 45)
str_32227 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 45, 43), 'str', 'include_dirs')
# Processing the call keyword arguments (line 45)
kwargs_32228 = {}
# Getting the type of 'lapack_opt' (line 45)
lapack_opt_32225 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 45, 28), 'lapack_opt', False)
# Obtaining the member 'pop' of a type (line 45)
pop_32226 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 45, 28), lapack_opt_32225, 'pop')
# Calling pop(args, kwargs) (line 45)
pop_call_result_32229 = invoke(stypy.reporting.localization.Localization(__file__, 45, 28), pop_32226, *[str_32227], **kwargs_32228)
# Processing the call keyword arguments (line 45)
kwargs_32230 = {}
# Getting the type of 'include_dirs' (line 45)
include_dirs_32223 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 45, 8), 'include_dirs', False)
# Obtaining the member 'extend' of a type (line 45)
extend_32224 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 45, 8), include_dirs_32223, 'extend')
# Calling extend(args, kwargs) (line 45)
extend_call_result_32231 = invoke(stypy.reporting.localization.Localization(__file__, 45, 8), extend_32224, *[pop_call_result_32229], **kwargs_32230)
# SSA join for if statement (line 43)
module_type_store = module_type_store.join_ssa_context()
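# The `if 'include_dirs' in lapack_opt:` test above is analyzed in SSA form: a
# child context is opened for the if-body, assignments inside it (the
# re-binding of 'lapack_opt' on line 44) stay local to that context, and the
# join merges each name back as the union of its type on the taken and
# not-taken paths. A minimal sketch of such a join (hypothetical helper, not
# the stypy implementation):
#
#     def join_ssa(before, branch):
#         # After the if, a name may hold its pre-branch type or the type
#         # assigned inside the branch; keep both possibilities.
#         merged = dict(before)
#         for name, typ in branch.items():
#             merged[name] = typ if before.get(name) == typ else (before.get(name), typ)
#         return merged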
# Call to add_extension(...): (line 47)
# Processing the call arguments (line 47)
str_32234 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 47, 25), 'str', '_quadpack')
# Processing the call keyword arguments (line 47)
# Obtaining an instance of the builtin type 'list' (line 48)
list_32235 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 48, 33), 'list')
# Adding type elements to the builtin type 'list' instance (line 48)
# Adding element type (line 48)
str_32236 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 48, 34), 'str', '_quadpackmodule.c')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 48, 33), list_32235, str_32236)
keyword_32237 = list_32235
# Obtaining an instance of the builtin type 'list' (line 49)
list_32238 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 49)
# Adding element type (line 49)
str_32239 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 36), 'str', 'quadpack')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 49, 35), list_32238, str_32239)
# Adding element type (line 49)
str_32240 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 49, 48), 'str', 'mach')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 49, 35), list_32238, str_32240)
# Getting the type of 'lapack_libs' (line 49)
lapack_libs_32241 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 49, 58), 'lapack_libs', False)
# Applying the binary operator '+' (line 49)
result_add_32242 = python_operator(stypy.reporting.localization.Localization(__file__, 49, 35), '+', list_32238, lapack_libs_32241)
keyword_32243 = result_add_32242
# Obtaining an instance of the builtin type 'list' (line 50)
list_32244 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 50, 34), 'list')
# Adding type elements to the builtin type 'list' instance (line 50)
# Adding element type (line 50)
str_32245 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 50, 35), 'str', '__quadpack.h')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 50, 34), list_32244, str_32245)
# Getting the type of 'quadpack_src' (line 51)
quadpack_src_32246 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 36), 'quadpack_src', False)
# Applying the binary operator '+' (line 50)
result_add_32247 = python_operator(stypy.reporting.localization.Localization(__file__, 50, 34), '+', list_32244, quadpack_src_32246)
# Getting the type of 'mach_src' (line 51)
mach_src_32248 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 51, 51), 'mach_src', False)
# Applying the binary operator '+' (line 51)
result_add_32249 = python_operator(stypy.reporting.localization.Localization(__file__, 51, 49), '+', result_add_32247, mach_src_32248)
keyword_32250 = result_add_32249
# Getting the type of 'include_dirs' (line 52)
include_dirs_32251 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 52, 38), 'include_dirs', False)
keyword_32252 = include_dirs_32251
# Getting the type of 'lapack_opt' (line 53)
lapack_opt_32253 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 53, 27), 'lapack_opt', False)
kwargs_32254 = {'libraries': keyword_32243, 'sources': keyword_32237, 'depends': keyword_32250, 'lapack_opt_32253': lapack_opt_32253, 'include_dirs': keyword_32252}
# Getting the type of 'config' (line 47)
config_32232 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 47, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 47)
add_extension_32233 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 47, 4), config_32232, 'add_extension')
# Calling add_extension(args, kwargs) (line 47)
add_extension_call_result_32255 = invoke(stypy.reporting.localization.Localization(__file__, 47, 4), add_extension_32233, *[str_32234], **kwargs_32254)
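# One generated-code convention worth flagging: the original call ends with
# `**lapack_opt`, and judging by the kwargs dict assembled above, the
# generator encodes that dict-expansion by inserting the expanded variable
# under its own SSA name ('lapack_opt_32253') alongside the explicit keyword
# entries. The same shape recurs in every add_extension call below that used
# `**lapack_opt` or `**odepack_opts` in the source.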
# Assigning a Call to a Name (line 56):
# Call to copy(...): (line 56)
# Processing the call keyword arguments (line 56)
kwargs_32258 = {}
# Getting the type of 'lapack_opt' (line 56)
lapack_opt_32256 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 56, 19), 'lapack_opt', False)
# Obtaining the member 'copy' of a type (line 56)
copy_32257 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 56, 19), lapack_opt_32256, 'copy')
# Calling copy(args, kwargs) (line 56)
copy_call_result_32259 = invoke(stypy.reporting.localization.Localization(__file__, 56, 19), copy_32257, *[], **kwargs_32258)
# Assigning a type to the variable 'odepack_opts' (line 56)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 56, 4), 'odepack_opts', copy_call_result_32259)
# Call to update(...): (line 57)
# Processing the call arguments (line 57)
# Getting the type of 'numpy_nodepr_api' (line 57)
numpy_nodepr_api_32262 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 57, 24), 'numpy_nodepr_api', False)
# Processing the call keyword arguments (line 57)
kwargs_32263 = {}
# Getting the type of 'odepack_opts' (line 57)
odepack_opts_32260 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 57, 4), 'odepack_opts', False)
# Obtaining the member 'update' of a type (line 57)
update_32261 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 57, 4), odepack_opts_32260, 'update')
# Calling update(args, kwargs) (line 57)
update_call_result_32264 = invoke(stypy.reporting.localization.Localization(__file__, 57, 4), update_32261, *[numpy_nodepr_api_32262], **kwargs_32263)
# Call to add_extension(...): (line 58)
# Processing the call arguments (line 58)
str_32267 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 58, 25), 'str', '_odepack')
# Processing the call keyword arguments (line 58)
# Obtaining an instance of the builtin type 'list' (line 59)
list_32268 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 59, 33), 'list')
# Adding type elements to the builtin type 'list' instance (line 59)
# Adding element type (line 59)
str_32269 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 59, 34), 'str', '_odepackmodule.c')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 59, 33), list_32268, str_32269)
keyword_32270 = list_32268
# Obtaining an instance of the builtin type 'list' (line 60)
list_32271 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 60, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 60)
# Adding element type (line 60)
str_32272 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 60, 36), 'str', 'lsoda')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 60, 35), list_32271, str_32272)
# Adding element type (line 60)
str_32273 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 60, 45), 'str', 'mach')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 60, 35), list_32271, str_32273)
# Getting the type of 'lapack_libs' (line 60)
lapack_libs_32274 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 60, 55), 'lapack_libs', False)
# Applying the binary operator '+' (line 60)
result_add_32275 = python_operator(stypy.reporting.localization.Localization(__file__, 60, 35), '+', list_32271, lapack_libs_32274)
keyword_32276 = result_add_32275
# Getting the type of 'lsoda_src' (line 61)
lsoda_src_32277 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 61, 34), 'lsoda_src', False)
# Getting the type of 'mach_src' (line 61)
mach_src_32278 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 61, 46), 'mach_src', False)
# Applying the binary operator '+' (line 61)
result_add_32279 = python_operator(stypy.reporting.localization.Localization(__file__, 61, 34), '+', lsoda_src_32277, mach_src_32278)
keyword_32280 = result_add_32279
# Getting the type of 'odepack_opts' (line 62)
odepack_opts_32281 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 62, 27), 'odepack_opts', False)
kwargs_32282 = {'libraries': keyword_32276, 'sources': keyword_32270, 'depends': keyword_32280, 'odepack_opts_32281': odepack_opts_32281}
# Getting the type of 'config' (line 58)
config_32265 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 58, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 58)
add_extension_32266 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 58, 4), config_32265, 'add_extension')
# Calling add_extension(args, kwargs) (line 58)
add_extension_call_result_32283 = invoke(stypy.reporting.localization.Localization(__file__, 58, 4), add_extension_32266, *[str_32267], **kwargs_32282)
# Call to add_extension(...): (line 65)
# Processing the call arguments (line 65)
str_32286 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 65, 25), 'str', 'vode')
# Processing the call keyword arguments (line 65)
# Obtaining an instance of the builtin type 'list' (line 66)
list_32287 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 66, 33), 'list')
# Adding type elements to the builtin type 'list' instance (line 66)
# Adding element type (line 66)
str_32288 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 66, 34), 'str', 'vode.pyf')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 66, 33), list_32287, str_32288)
keyword_32289 = list_32287
# Obtaining an instance of the builtin type 'list' (line 67)
list_32290 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 67, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 67)
# Adding element type (line 67)
str_32291 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 67, 36), 'str', 'vode')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 67, 35), list_32290, str_32291)
# Getting the type of 'lapack_libs' (line 67)
lapack_libs_32292 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 67, 46), 'lapack_libs', False)
# Applying the binary operator '+' (line 67)
result_add_32293 = python_operator(stypy.reporting.localization.Localization(__file__, 67, 35), '+', list_32290, lapack_libs_32292)
keyword_32294 = result_add_32293
# Getting the type of 'vode_src' (line 68)
vode_src_32295 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 68, 33), 'vode_src', False)
keyword_32296 = vode_src_32295
# Getting the type of 'lapack_opt' (line 69)
lapack_opt_32297 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 69, 27), 'lapack_opt', False)
kwargs_32298 = {'libraries': keyword_32294, 'sources': keyword_32289, 'depends': keyword_32296, 'lapack_opt_32297': lapack_opt_32297}
# Getting the type of 'config' (line 65)
config_32284 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 65, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 65)
add_extension_32285 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 65, 4), config_32284, 'add_extension')
# Calling add_extension(args, kwargs) (line 65)
add_extension_call_result_32299 = invoke(stypy.reporting.localization.Localization(__file__, 65, 4), add_extension_32285, *[str_32286], **kwargs_32298)
# Call to add_extension(...): (line 72)
# Processing the call arguments (line 72)
str_32302 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 72, 25), 'str', 'lsoda')
# Processing the call keyword arguments (line 72)
# Obtaining an instance of the builtin type 'list' (line 73)
list_32303 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 33), 'list')
# Adding type elements to the builtin type 'list' instance (line 73)
# Adding element type (line 73)
str_32304 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 73, 34), 'str', 'lsoda.pyf')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 73, 33), list_32303, str_32304)
keyword_32305 = list_32303
# Obtaining an instance of the builtin type 'list' (line 74)
list_32306 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 74, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 74)
# Adding element type (line 74)
str_32307 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 74, 36), 'str', 'lsoda')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 74, 35), list_32306, str_32307)
# Adding element type (line 74)
str_32308 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 74, 45), 'str', 'mach')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 74, 35), list_32306, str_32308)
# Getting the type of 'lapack_libs' (line 74)
lapack_libs_32309 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 74, 55), 'lapack_libs', False)
# Applying the binary operator '+' (line 74)
result_add_32310 = python_operator(stypy.reporting.localization.Localization(__file__, 74, 35), '+', list_32306, lapack_libs_32309)
keyword_32311 = result_add_32310
# Getting the type of 'lsoda_src' (line 75)
lsoda_src_32312 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 75, 34), 'lsoda_src', False)
# Getting the type of 'mach_src' (line 75)
mach_src_32313 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 75, 46), 'mach_src', False)
# Applying the binary operator '+' (line 75)
result_add_32314 = python_operator(stypy.reporting.localization.Localization(__file__, 75, 34), '+', lsoda_src_32312, mach_src_32313)
keyword_32315 = result_add_32314
# Getting the type of 'lapack_opt' (line 76)
lapack_opt_32316 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 76, 27), 'lapack_opt', False)
kwargs_32317 = {'libraries': keyword_32311, 'sources': keyword_32305, 'depends': keyword_32315, 'lapack_opt_32316': lapack_opt_32316}
# Getting the type of 'config' (line 72)
config_32300 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 72, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 72)
add_extension_32301 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 72, 4), config_32300, 'add_extension')
# Calling add_extension(args, kwargs) (line 72)
add_extension_call_result_32318 = invoke(stypy.reporting.localization.Localization(__file__, 72, 4), add_extension_32301, *[str_32302], **kwargs_32317)
# Call to add_extension(...): (line 79)
# Processing the call arguments (line 79)
str_32321 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 79, 25), 'str', '_dop')
# Processing the call keyword arguments (line 79)
# Obtaining an instance of the builtin type 'list' (line 80)
list_32322 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 80, 33), 'list')
# Adding type elements to the builtin type 'list' instance (line 80)
# Adding element type (line 80)
str_32323 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 80, 34), 'str', 'dop.pyf')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 80, 33), list_32322, str_32323)
keyword_32324 = list_32322
# Obtaining an instance of the builtin type 'list' (line 81)
list_32325 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 81)
# Adding element type (line 81)
str_32326 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 81, 36), 'str', 'dop')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 81, 35), list_32325, str_32326)
keyword_32327 = list_32325
# Getting the type of 'dop_src' (line 82)
dop_src_32328 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 82, 33), 'dop_src', False)
keyword_32329 = dop_src_32328
kwargs_32330 = {'libraries': keyword_32327, 'sources': keyword_32324, 'depends': keyword_32329}
# Getting the type of 'config' (line 79)
config_32319 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 79, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 79)
add_extension_32320 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 79, 4), config_32319, 'add_extension')
# Calling add_extension(args, kwargs) (line 79)
add_extension_call_result_32331 = invoke(stypy.reporting.localization.Localization(__file__, 79, 4), add_extension_32320, *[str_32321], **kwargs_32330)
# Call to add_extension(...): (line 84)
# Processing the call arguments (line 84)
str_32334 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 84, 25), 'str', '_test_multivariate')
# Processing the call keyword arguments (line 84)
# Getting the type of 'quadpack_test_src' (line 85)
quadpack_test_src_32335 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 85, 33), 'quadpack_test_src', False)
keyword_32336 = quadpack_test_src_32335
kwargs_32337 = {'sources': keyword_32336}
# Getting the type of 'config' (line 84)
config_32332 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 84, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 84)
add_extension_32333 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 84, 4), config_32332, 'add_extension')
# Calling add_extension(args, kwargs) (line 84)
add_extension_call_result_32338 = invoke(stypy.reporting.localization.Localization(__file__, 84, 4), add_extension_32333, *[str_32334], **kwargs_32337)
# Call to add_extension(...): (line 88)
# Processing the call arguments (line 88)
str_32341 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 88, 25), 'str', '_test_odeint_banded')
# Processing the call keyword arguments (line 88)
# Getting the type of 'odeint_banded_test_src' (line 89)
odeint_banded_test_src_32342 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 89, 33), 'odeint_banded_test_src', False)
keyword_32343 = odeint_banded_test_src_32342
# Obtaining an instance of the builtin type 'list' (line 90)
list_32344 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 90, 35), 'list')
# Adding type elements to the builtin type 'list' instance (line 90)
# Adding element type (line 90)
str_32345 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 90, 36), 'str', 'lsoda')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 90, 35), list_32344, str_32345)
# Adding element type (line 90)
str_32346 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 90, 45), 'str', 'mach')
add_contained_elements_type(stypy.reporting.localization.Localization(__file__, 90, 35), list_32344, str_32346)
# Getting the type of 'lapack_libs' (line 90)
lapack_libs_32347 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 90, 55), 'lapack_libs', False)
# Applying the binary operator '+' (line 90)
result_add_32348 = python_operator(stypy.reporting.localization.Localization(__file__, 90, 35), '+', list_32344, lapack_libs_32347)
keyword_32349 = result_add_32348
# Getting the type of 'lsoda_src' (line 91)
lsoda_src_32350 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 91, 34), 'lsoda_src', False)
# Getting the type of 'mach_src' (line 91)
mach_src_32351 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 91, 46), 'mach_src', False)
# Applying the binary operator '+' (line 91)
result_add_32352 = python_operator(stypy.reporting.localization.Localization(__file__, 91, 34), '+', lsoda_src_32350, mach_src_32351)
keyword_32353 = result_add_32352
# Getting the type of 'lapack_opt' (line 92)
lapack_opt_32354 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 92, 27), 'lapack_opt', False)
kwargs_32355 = {'libraries': keyword_32349, 'sources': keyword_32343, 'depends': keyword_32353, 'lapack_opt_32354': lapack_opt_32354}
# Getting the type of 'config' (line 88)
config_32339 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 88, 4), 'config', False)
# Obtaining the member 'add_extension' of a type (line 88)
add_extension_32340 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 88, 4), config_32339, 'add_extension')
# Calling add_extension(args, kwargs) (line 88)
add_extension_call_result_32356 = invoke(stypy.reporting.localization.Localization(__file__, 88, 4), add_extension_32340, *[str_32341], **kwargs_32355)
# Call to add_subpackage(...): (line 94)
# Processing the call arguments (line 94)
str_32359 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 94, 26), 'str', '_ivp')
# Processing the call keyword arguments (line 94)
kwargs_32360 = {}
# Getting the type of 'config' (line 94)
config_32357 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 94, 4), 'config', False)
# Obtaining the member 'add_subpackage' of a type (line 94)
add_subpackage_32358 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 94, 4), config_32357, 'add_subpackage')
# Calling add_subpackage(args, kwargs) (line 94)
add_subpackage_call_result_32361 = invoke(stypy.reporting.localization.Localization(__file__, 94, 4), add_subpackage_32358, *[str_32359], **kwargs_32360)
# Call to add_data_dir(...): (line 96)
# Processing the call arguments (line 96)
str_32364 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 96, 24), 'str', 'tests')
# Processing the call keyword arguments (line 96)
kwargs_32365 = {}
# Getting the type of 'config' (line 96)
config_32362 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 96, 4), 'config', False)
# Obtaining the member 'add_data_dir' of a type (line 96)
add_data_dir_32363 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 96, 4), config_32362, 'add_data_dir')
# Calling add_data_dir(args, kwargs) (line 96)
add_data_dir_call_result_32366 = invoke(stypy.reporting.localization.Localization(__file__, 96, 4), add_data_dir_32363, *[str_32364], **kwargs_32365)
# Getting the type of 'config' (line 97)
config_32367 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 97, 11), 'config')
# Assigning a type to the variable 'stypy_return_type' (line 97)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 97, 4), 'stypy_return_type', config_32367)
# ################# End of 'configuration(...)' code ##################
# Teardown call information
teardown_call_information(localization, arguments)
# Storing the return type of function 'configuration' in the type store
# Getting the type of 'stypy_return_type' (line 9)
stypy_return_type_32368 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 9, 0), 'stypy_return_type')
module_type_store.store_return_type_of_current_context(stypy_return_type_32368)
# Destroy the current context
module_type_store = module_type_store.close_function_context()
# Return type of the function 'configuration'
return stypy_return_type_32368
# Assigning a type to the variable 'configuration' (line 9)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 9, 0), 'configuration', configuration)
if (__name__ == '__main__'):
stypy.reporting.localization.Localization.set_current(stypy.reporting.localization.Localization(__file__, 101, 4))
# 'from numpy.distutils.core import setup' statement (line 101)
update_path_to_current_file_folder('C:/Python27/lib/site-packages/scipy/integrate/')
import_32369 = generate_type_inference_code_for_module(stypy.reporting.localization.Localization(__file__, 101, 4), 'numpy.distutils.core')
if (type(import_32369) is not StypyTypeError):
if (import_32369 != 'pyd_module'):
__import__(import_32369)
sys_modules_32370 = sys.modules[import_32369]
import_from_module(stypy.reporting.localization.Localization(__file__, 101, 4), 'numpy.distutils.core', sys_modules_32370.module_type_store, module_type_store, ['setup'])
nest_module(stypy.reporting.localization.Localization(__file__, 101, 4), __file__, sys_modules_32370, sys_modules_32370.module_type_store, module_type_store)
else:
from numpy.distutils.core import setup
import_from_module(stypy.reporting.localization.Localization(__file__, 101, 4), 'numpy.distutils.core', None, module_type_store, ['setup'], [setup])
else:
# Assigning a type to the variable 'numpy.distutils.core' (line 101)
module_type_store.set_type_of(stypy.reporting.localization.Localization(__file__, 101, 4), 'numpy.distutils.core', import_32369)
remove_current_file_folder_from_path('C:/Python27/lib/site-packages/scipy/integrate/')
# Call to setup(...): (line 102)
# Processing the call keyword arguments (line 102)
# Call to todict(...): (line 102)
# Processing the call keyword arguments (line 102)
kwargs_32378 = {}
# Call to configuration(...): (line 102)
# Processing the call keyword arguments (line 102)
str_32373 = get_builtin_python_type_instance(stypy.reporting.localization.Localization(__file__, 102, 35), 'str', '')
keyword_32374 = str_32373
kwargs_32375 = {'top_path': keyword_32374}
# Getting the type of 'configuration' (line 102)
configuration_32372 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 102, 12), 'configuration', False)
# Calling configuration(args, kwargs) (line 102)
configuration_call_result_32376 = invoke(stypy.reporting.localization.Localization(__file__, 102, 12), configuration_32372, *[], **kwargs_32375)
# Obtaining the member 'todict' of a type (line 102)
todict_32377 = module_type_store.get_type_of_member(stypy.reporting.localization.Localization(__file__, 102, 12), configuration_call_result_32376, 'todict')
# Calling todict(args, kwargs) (line 102)
todict_call_result_32379 = invoke(stypy.reporting.localization.Localization(__file__, 102, 12), todict_32377, *[], **kwargs_32378)
kwargs_32380 = {'todict_call_result_32379': todict_call_result_32379}
# Getting the type of 'setup' (line 102)
setup_32371 = module_type_store.get_type_of(stypy.reporting.localization.Localization(__file__, 102, 4), 'setup', False)
# Calling setup(args, kwargs) (line 102)
setup_call_result_32381 = invoke(stypy.reporting.localization.Localization(__file__, 102, 4), setup_32371, *[], **kwargs_32380)
# ################# End of the type inference program ##################
module_errors = stypy.errors.type_error.StypyTypeError.get_error_msgs()
module_warnings = stypy.errors.type_warning.TypeWarning.get_warning_msgs()
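# -----------------------------------------------------------------------
# For readability, a hedged reconstruction of the original source fragment
# (lines 72-102 of scipy/integrate/setup.py) that the generated
# type-inference program above corresponds to.  Keys such as
# 'lapack_opt_32316' in the generated kwargs dicts appear to stand for a
# **lapack_opt double-star argument in the original calls.  The extension
# name and sources on line 72 are not visible above; 'lsoda' and
# ['lsoda.pyf'] are assumed from SciPy's published sources, as is the
# standard numpy.distutils configuration() signature.  A sketch, not the
# verified original file:
#
#     def configuration(parent_package='', top_path=None):
#         ...
#         config.add_extension('lsoda',
#                              sources=['lsoda.pyf'],
#                              libraries=['lsoda', 'mach'] + lapack_libs,
#                              depends=(lsoda_src + mach_src),
#                              **lapack_opt)
#
#         config.add_extension('_dop',
#                              sources=['dop.pyf'],
#                              libraries=['dop'],
#                              depends=dop_src)
#
#         config.add_extension('_test_multivariate',
#                              sources=quadpack_test_src)
#
#         config.add_extension('_test_odeint_banded',
#                              sources=odeint_banded_test_src,
#                              libraries=['lsoda', 'mach'] + lapack_libs,
#                              depends=(lsoda_src + mach_src),
#                              **lapack_opt)
#
#         config.add_subpackage('_ivp')
#         config.add_data_dir('tests')
#         return config
#
#     if __name__ == '__main__':
#         from numpy.distutils.core import setup
#         setup(**configuration(top_path='').todict())
# -----------------------------------------------------------------------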
# orm/relationships.py
# Copyright (C) 2005-2023 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""Heuristics related to join conditions as used in
:func:`_orm.relationship`.
Provides the :class:`.JoinCondition` object, which encapsulates
SQL annotation and aliasing behavior focused on the `primaryjoin`
and `secondaryjoin` aspects of :func:`_orm.relationship`.
"""
from __future__ import annotations
import collections
from collections import abc
import dataclasses
import inspect as _py_inspect
import re
import typing
from typing import Any
from typing import Callable
from typing import cast
from typing import Collection
from typing import Dict
from typing import Generic
from typing import Iterable
from typing import Iterator
from typing import List
from typing import NamedTuple
from typing import NoReturn
from typing import Optional
from typing import Sequence
from typing import Set
from typing import Tuple
from typing import Type
from typing import TypeVar
from typing import Union
import weakref
from . import attributes
from . import strategy_options
from ._typing import insp_is_aliased_class
from ._typing import is_has_collection_adapter
from .base import _DeclarativeMapped
from .base import _is_mapped_class
from .base import class_mapper
from .base import DynamicMapped
from .base import LoaderCallableStatus
from .base import PassiveFlag
from .base import state_str
from .base import WriteOnlyMapped
from .interfaces import _AttributeOptions
from .interfaces import _IntrospectsAnnotations
from .interfaces import MANYTOMANY
from .interfaces import MANYTOONE
from .interfaces import ONETOMANY
from .interfaces import PropComparator
from .interfaces import RelationshipDirection
from .interfaces import StrategizedProperty
from .util import _orm_annotate
from .util import _orm_deannotate
from .util import CascadeOptions
from .. import exc as sa_exc
from .. import Exists
from .. import log
from .. import schema
from .. import sql
from .. import util
from ..inspection import inspect
from ..sql import coercions
from ..sql import expression
from ..sql import operators
from ..sql import roles
from ..sql import visitors
from ..sql._typing import _ColumnExpressionArgument
from ..sql._typing import _HasClauseElement
from ..sql.annotation import _safe_annotate
from ..sql.elements import ColumnClause
from ..sql.elements import ColumnElement
from ..sql.util import _deep_annotate
from ..sql.util import _deep_deannotate
from ..sql.util import _shallow_annotate
from ..sql.util import adapt_criterion_to_null
from ..sql.util import ClauseAdapter
from ..sql.util import join_condition
from ..sql.util import selectables_overlap
from ..sql.util import visit_binary_product
from ..util.typing import de_optionalize_union_types
from ..util.typing import Literal
from ..util.typing import resolve_name_to_real_class_name
if typing.TYPE_CHECKING:
from ._typing import _EntityType
from ._typing import _ExternalEntityType
from ._typing import _IdentityKeyType
from ._typing import _InstanceDict
from ._typing import _InternalEntityType
from ._typing import _O
from ._typing import _RegistryType
from .base import Mapped
from .clsregistry import _class_resolver
from .clsregistry import _ModNS
from .decl_base import _ClassScanMapperConfig
from .dependency import DependencyProcessor
from .mapper import Mapper
from .query import Query
from .session import Session
from .state import InstanceState
from .strategies import LazyLoader
from .util import AliasedClass
from .util import AliasedInsp
from ..sql._typing import _CoreAdapterProto
from ..sql._typing import _EquivalentColumnMap
from ..sql._typing import _InfoType
from ..sql.annotation import _AnnotationDict
from ..sql.annotation import SupportsAnnotations
from ..sql.elements import BinaryExpression
from ..sql.elements import BindParameter
from ..sql.elements import ClauseElement
from ..sql.schema import Table
from ..sql.selectable import FromClause
from ..util.typing import _AnnotationScanType
from ..util.typing import RODescriptorReference
_T = TypeVar("_T", bound=Any)
_T1 = TypeVar("_T1", bound=Any)
_T2 = TypeVar("_T2", bound=Any)
_PT = TypeVar("_PT", bound=Any)
_PT2 = TypeVar("_PT2", bound=Any)
_RelationshipArgumentType = Union[
str,
Type[_T],
Callable[[], Type[_T]],
"Mapper[_T]",
"AliasedClass[_T]",
Callable[[], "Mapper[_T]"],
Callable[[], "AliasedClass[_T]"],
]
_LazyLoadArgumentType = Literal[
"select",
"joined",
"selectin",
"subquery",
"raise",
"raise_on_sql",
"noload",
"immediate",
"write_only",
"dynamic",
True,
False,
None,
]
_RelationshipJoinConditionArgument = Union[
str, _ColumnExpressionArgument[bool]
]
_RelationshipSecondaryArgument = Union[
"FromClause", str, Callable[[], "FromClause"]
]
_ORMOrderByArgument = Union[
Literal[False],
str,
_ColumnExpressionArgument[Any],
Callable[[], _ColumnExpressionArgument[Any]],
Callable[[], Iterable[_ColumnExpressionArgument[Any]]],
Iterable[Union[str, _ColumnExpressionArgument[Any]]],
]
ORMBackrefArgument = Union[str, Tuple[str, Dict[str, Any]]]
_ORMColCollectionElement = Union[
ColumnClause[Any], _HasClauseElement, roles.DMLColumnRole, "Mapped[Any]"
]
_ORMColCollectionArgument = Union[
str,
Sequence[_ORMColCollectionElement],
Callable[[], Sequence[_ORMColCollectionElement]],
Callable[[], _ORMColCollectionElement],
_ORMColCollectionElement,
]
_CEA = TypeVar("_CEA", bound=_ColumnExpressionArgument[Any])
_CE = TypeVar("_CE", bound="ColumnElement[Any]")
_ColumnPairIterable = Iterable[Tuple[ColumnElement[Any], ColumnElement[Any]]]
_ColumnPairs = Sequence[Tuple[ColumnElement[Any], ColumnElement[Any]]]
_MutableColumnPairs = List[Tuple[ColumnElement[Any], ColumnElement[Any]]]
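# These aliases type the corresponding relationship() parameters; a usage
# sketch with illustrative values (Child and association_table are
# hypothetical):
#
#     relationship(
#         "Child",                      # _RelationshipArgumentType
#         secondary=association_table,  # _RelationshipSecondaryArgument
#         primaryjoin="Parent.id == Child.parent_id",  # join condition arg
#         order_by="Child.id",          # _ORMOrderByArgument
#         lazy="selectin",              # _LazyLoadArgumentType
#         backref="parent",             # ORMBackrefArgument
#     )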
def remote(expr: _CEA) -> _CEA:
"""Annotate a portion of a primaryjoin expression
with a 'remote' annotation.
See the section :ref:`relationship_custom_foreign` for a
description of use.
.. seealso::
:ref:`relationship_custom_foreign`
:func:`.foreign`
"""
return _annotate_columns( # type: ignore
coercions.expect(roles.ColumnArgumentRole, expr), {"remote": True}
)
def foreign(expr: _CEA) -> _CEA:
"""Annotate a portion of a primaryjoin expression
with a 'foreign' annotation.
See the section :ref:`relationship_custom_foreign` for a
description of use.
.. seealso::
:ref:`relationship_custom_foreign`
:func:`.remote`
"""
return _annotate_columns( # type: ignore
coercions.expect(roles.ColumnArgumentRole, expr), {"foreign": True}
)
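# A usage sketch for foreign() / remote(), adapted from the documentation
# section referenced in the docstrings above (HostEntry is illustrative,
# not part of this module):
#
#     class HostEntry(Base):
#         __tablename__ = "host_entry"
#
#         id = mapped_column(Integer, primary_key=True)
#         ip_address = mapped_column(INET)
#         content = mapped_column(String(50))
#
#         # explicit foreign()/remote() annotations in lieu of separate
#         # primaryjoin/foreign_keys/remote_side arguments
#         parent_host = relationship(
#             "HostEntry",
#             primaryjoin=remote(ip_address) == cast(foreign(content), INET),
#         )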
@dataclasses.dataclass
class _RelationshipArg(Generic[_T1, _T2]):
"""stores a user-defined parameter value that must be resolved and
parsed later at mapper configuration time.
"""
__slots__ = "name", "argument", "resolved"
name: str
argument: _T1
resolved: Optional[_T2]
def _is_populated(self) -> bool:
return self.argument is not None
def _resolve_against_registry(
self, clsregistry_resolver: Callable[[str, bool], _class_resolver]
) -> None:
attr_value = self.argument
if isinstance(attr_value, str):
self.resolved = clsregistry_resolver(
attr_value, self.name == "secondary"
)()
elif callable(attr_value) and not _is_mapped_class(attr_value):
self.resolved = attr_value()
else:
self.resolved = attr_value
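# Resolution sketch for _resolve_against_registry(), with hypothetical
# arguments:
#
#     relationship("Child")        # str -> resolved via the class registry
#                                  #        (the flag passed is True only
#                                  #        when the arg name is "secondary")
#     relationship(lambda: Child)  # non-mapped callable -> invoked
#     relationship(Child)          # anything else -> stored as-is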
class _RelationshipArgs(NamedTuple):
"""stores user-passed parameters that are resolved at mapper configuration
time.
"""
secondary: _RelationshipArg[
Optional[_RelationshipSecondaryArgument],
Optional[FromClause],
]
primaryjoin: _RelationshipArg[
Optional[_RelationshipJoinConditionArgument],
Optional[ColumnElement[Any]],
]
secondaryjoin: _RelationshipArg[
Optional[_RelationshipJoinConditionArgument],
Optional[ColumnElement[Any]],
]
order_by: _RelationshipArg[
_ORMOrderByArgument,
Union[Literal[None, False], Tuple[ColumnElement[Any], ...]],
]
foreign_keys: _RelationshipArg[
Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
]
remote_side: _RelationshipArg[
Optional[_ORMColCollectionArgument], Set[ColumnElement[Any]]
]
@log.class_logger
class RelationshipProperty(
_IntrospectsAnnotations, StrategizedProperty[_T], log.Identified
):
"""Describes an object property that holds a single item or list
of items that correspond to a related database table.
Public constructor is the :func:`_orm.relationship` function.
.. seealso::
:ref:`relationship_config_toplevel`
"""
strategy_wildcard_key = strategy_options._RELATIONSHIP_TOKEN
inherit_cache = True
""":meta private:"""
_links_to_entity = True
_is_relationship = True
_overlaps: Sequence[str]
_lazy_strategy: LazyLoader
_persistence_only = dict(
passive_deletes=False,
passive_updates=True,
enable_typechecks=True,
active_history=False,
cascade_backrefs=False,
)
_dependency_processor: Optional[DependencyProcessor] = None
primaryjoin: ColumnElement[bool]
secondaryjoin: Optional[ColumnElement[bool]]
secondary: Optional[FromClause]
_join_condition: JoinCondition
order_by: Union[Literal[False], Tuple[ColumnElement[Any], ...]]
_user_defined_foreign_keys: Set[ColumnElement[Any]]
_calculated_foreign_keys: Set[ColumnElement[Any]]
remote_side: Set[ColumnElement[Any]]
local_columns: Set[ColumnElement[Any]]
synchronize_pairs: _ColumnPairs
secondary_synchronize_pairs: Optional[_ColumnPairs]
local_remote_pairs: Optional[_ColumnPairs]
direction: RelationshipDirection
_init_args: _RelationshipArgs
def __init__(
self,
argument: Optional[_RelationshipArgumentType[_T]] = None,
secondary: Optional[_RelationshipSecondaryArgument] = None,
*,
uselist: Optional[bool] = None,
collection_class: Optional[
Union[Type[Collection[Any]], Callable[[], Collection[Any]]]
] = None,
primaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
secondaryjoin: Optional[_RelationshipJoinConditionArgument] = None,
back_populates: Optional[str] = None,
order_by: _ORMOrderByArgument = False,
backref: Optional[ORMBackrefArgument] = None,
overlaps: Optional[str] = None,
post_update: bool = False,
cascade: str = "save-update, merge",
viewonly: bool = False,
attribute_options: Optional[_AttributeOptions] = None,
lazy: _LazyLoadArgumentType = "select",
passive_deletes: Union[Literal["all"], bool] = False,
passive_updates: bool = True,
active_history: bool = False,
enable_typechecks: bool = True,
foreign_keys: Optional[_ORMColCollectionArgument] = None,
remote_side: Optional[_ORMColCollectionArgument] = None,
join_depth: Optional[int] = None,
comparator_factory: Optional[
Type[RelationshipProperty.Comparator[Any]]
] = None,
single_parent: bool = False,
innerjoin: bool = False,
distinct_target_key: Optional[bool] = None,
load_on_pending: bool = False,
query_class: Optional[Type[Query[Any]]] = None,
info: Optional[_InfoType] = None,
omit_join: Literal[None, False] = None,
sync_backref: Optional[bool] = None,
doc: Optional[str] = None,
bake_queries: Literal[True] = True,
cascade_backrefs: Literal[False] = False,
_local_remote_pairs: Optional[_ColumnPairs] = None,
_legacy_inactive_history_style: bool = False,
):
super().__init__(attribute_options=attribute_options)
self.uselist = uselist
self.argument = argument
self._init_args = _RelationshipArgs(
_RelationshipArg("secondary", secondary, None),
_RelationshipArg("primaryjoin", primaryjoin, None),
_RelationshipArg("secondaryjoin", secondaryjoin, None),
_RelationshipArg("order_by", order_by, None),
_RelationshipArg("foreign_keys", foreign_keys, None),
_RelationshipArg("remote_side", remote_side, None),
)
self.post_update = post_update
self.viewonly = viewonly
if viewonly:
self._warn_for_persistence_only_flags(
passive_deletes=passive_deletes,
passive_updates=passive_updates,
enable_typechecks=enable_typechecks,
active_history=active_history,
cascade_backrefs=cascade_backrefs,
)
if viewonly and sync_backref:
raise sa_exc.ArgumentError(
"sync_backref and viewonly cannot both be True"
)
self.sync_backref = sync_backref
self.lazy = lazy
self.single_parent = single_parent
self.collection_class = collection_class
self.passive_deletes = passive_deletes
if cascade_backrefs:
raise sa_exc.ArgumentError(
"The 'cascade_backrefs' parameter passed to "
"relationship() may only be set to False."
)
self.passive_updates = passive_updates
self.enable_typechecks = enable_typechecks
self.query_class = query_class
self.innerjoin = innerjoin
self.distinct_target_key = distinct_target_key
self.doc = doc
self.active_history = active_history
self._legacy_inactive_history_style = _legacy_inactive_history_style
self.join_depth = join_depth
if omit_join:
util.warn(
"setting omit_join to True is not supported; selectin "
"loading of this relationship may not work correctly if this "
"flag is set explicitly. omit_join optimization is "
"automatically detected for conditions under which it is "
"supported."
)
self.omit_join = omit_join
self.local_remote_pairs = _local_remote_pairs
self.load_on_pending = load_on_pending
self.comparator_factory = (
comparator_factory or RelationshipProperty.Comparator
)
util.set_creation_order(self)
if info is not None:
self.info.update(info)
self.strategy_key = (("lazy", self.lazy),)
self._reverse_property: Set[RelationshipProperty[Any]] = set()
if overlaps:
self._overlaps = set(re.split(r"\s*,\s*", overlaps)) # type: ignore # noqa: E501
else:
self._overlaps = ()
# mypy ignoring the @property setter
self.cascade = cascade # type: ignore
self.back_populates = back_populates
if self.back_populates:
if backref:
raise sa_exc.ArgumentError(
"backref and back_populates keyword arguments "
"are mutually exclusive"
)
self.backref = None
else:
self.backref = backref
def _warn_for_persistence_only_flags(self, **kw: Any) -> None:
for k, v in kw.items():
if v != self._persistence_only[k]:
# we are warning here rather than warn deprecated as this is a
# configuration mistake, and Python shows regular warnings more
# aggressively than deprecation warnings by default. Unlike the
# case of setting viewonly with cascade, the settings being
# warned about here are not actively doing the wrong thing
# against viewonly=True, so it is not as urgent to have these
# raise an error.
util.warn(
"Setting %s on relationship() while also "
"setting viewonly=True does not make sense, as a "
"viewonly=True relationship does not perform persistence "
"operations. This configuration may raise an error "
"in a future release." % (k,)
)
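# An illustrative configuration that triggers the warning above:
# passive_deletes differs from its persistence-only default, yet has no
# effect on a viewonly relationship (Address is hypothetical):
#
#     addresses = relationship(
#         "Address", viewonly=True, passive_deletes=True
#     )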
def instrument_class(self, mapper: Mapper[Any]) -> None:
attributes.register_descriptor(
mapper.class_,
self.key,
comparator=self.comparator_factory(self, mapper),
parententity=mapper,
doc=self.doc,
)
class Comparator(util.MemoizedSlots, PropComparator[_PT]):
"""Produce boolean, comparison, and other operators for
:class:`.RelationshipProperty` attributes.
See the documentation for :class:`.PropComparator` for a brief
overview of ORM level operator definition.
.. seealso::
:class:`.PropComparator`
:class:`.ColumnProperty.Comparator`
:class:`.ColumnOperators`
:ref:`types_operators`
:attr:`.TypeEngine.comparator_factory`
"""
__slots__ = (
"entity",
"mapper",
"property",
"_of_type",
"_extra_criteria",
)
prop: RODescriptorReference[RelationshipProperty[_PT]]
_of_type: Optional[_EntityType[_PT]]
def __init__(
self,
prop: RelationshipProperty[_PT],
parentmapper: _InternalEntityType[Any],
adapt_to_entity: Optional[AliasedInsp[Any]] = None,
of_type: Optional[_EntityType[_PT]] = None,
extra_criteria: Tuple[ColumnElement[bool], ...] = (),
):
"""Construction of :class:`.RelationshipProperty.Comparator`
is internal to the ORM's attribute mechanics.
"""
self.prop = prop
self._parententity = parentmapper
self._adapt_to_entity = adapt_to_entity
if of_type:
self._of_type = of_type
else:
self._of_type = None
self._extra_criteria = extra_criteria
def adapt_to_entity(
self, adapt_to_entity: AliasedInsp[Any]
) -> RelationshipProperty.Comparator[Any]:
return self.__class__(
self.prop,
self._parententity,
adapt_to_entity=adapt_to_entity,
of_type=self._of_type,
)
entity: _InternalEntityType[_PT]
"""The target entity referred to by this
:class:`.RelationshipProperty.Comparator`.
This is either a :class:`_orm.Mapper` or :class:`.AliasedInsp`
object.
This is the "target" or "remote" side of the
:func:`_orm.relationship`.
"""
mapper: Mapper[_PT]
"""The target :class:`_orm.Mapper` referred to by this
:class:`.RelationshipProperty.Comparator`.
This is the "target" or "remote" side of the
:func:`_orm.relationship`.
"""
def _memoized_attr_entity(self) -> _InternalEntityType[_PT]:
if self._of_type:
return inspect(self._of_type) # type: ignore
else:
return self.prop.entity
def _memoized_attr_mapper(self) -> Mapper[_PT]:
return self.entity.mapper
def _source_selectable(self) -> FromClause:
if self._adapt_to_entity:
return self._adapt_to_entity.selectable
else:
return self.property.parent._with_polymorphic_selectable
def __clause_element__(self) -> ColumnElement[bool]:
adapt_from = self._source_selectable()
if self._of_type:
of_type_entity = inspect(self._of_type)
else:
of_type_entity = None
(
pj,
sj,
source,
dest,
secondary,
target_adapter,
) = self.prop._create_joins(
source_selectable=adapt_from,
source_polymorphic=True,
of_type_entity=of_type_entity,
alias_secondary=True,
extra_criteria=self._extra_criteria,
)
if sj is not None:
return pj & sj
else:
return pj
def of_type(self, class_: _EntityType[Any]) -> PropComparator[_PT]:
r"""Redefine this object in terms of a polymorphic subclass.
See :meth:`.PropComparator.of_type` for an example.
"""
return RelationshipProperty.Comparator(
self.prop,
self._parententity,
adapt_to_entity=self._adapt_to_entity,
of_type=class_,
extra_criteria=self._extra_criteria,
)
def and_(
self, *criteria: _ColumnExpressionArgument[bool]
) -> PropComparator[Any]:
"""Add AND criteria.
See :meth:`.PropComparator.and_` for an example.
.. versionadded:: 1.4
"""
exprs = tuple(
coercions.expect(roles.WhereHavingRole, clause)
for clause in util.coerce_generator_arg(criteria)
)
return RelationshipProperty.Comparator(
self.prop,
self._parententity,
adapt_to_entity=self._adapt_to_entity,
of_type=self._of_type,
extra_criteria=self._extra_criteria + exprs,
)
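# Usage sketch for Comparator.and_(), with hypothetical Parent/Child
# models; works both as an ON-clause modifier and inside loader options:
#
#     stmt = select(Parent).join(Parent.children.and_(Child.flag == True))
#
#     stmt = select(Parent).options(
#         selectinload(Parent.children.and_(Child.flag == True))
#     )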
def in_(self, other: Any) -> NoReturn:
"""Produce an IN clause - this is not implemented
for :func:`_orm.relationship`-based attributes at this time.
"""
raise NotImplementedError(
"in_() not yet supported for "
"relationships. For a simple "
"many-to-one, use in_() against "
"the set of foreign key values."
)
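# The workaround suggested in the error message above, sketched for a
# hypothetical many-to-one Address.user relationship: apply in_() to the
# foreign key column instead of the relationship attribute:
#
#     select(Address).where(Address.user_id.in_([1, 2, 3]))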
# https://github.com/python/mypy/issues/4266
__hash__ = None # type: ignore
def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
"""Implement the ``==`` operator.
In a many-to-one context, such as::
MyClass.some_prop == <some object>
this will typically produce a
clause such as::
mytable.related_id == <some id>
Where ``<some id>`` is the primary key of the given
object.
The ``==`` operator provides partial functionality for non-
many-to-one comparisons:
* Comparisons against collections are not supported.
Use :meth:`~.Relationship.Comparator.contains`.
* Compared to a scalar one-to-many, will produce a
clause that compares the target columns in the parent to
the given target.
* Compared to a scalar many-to-many, an alias
of the association table will be rendered as
well, forming a natural join that is part of the
main body of the query. This will not work for
queries that go beyond simple AND conjunctions of
comparisons, such as those which use OR. Use
explicit joins, outerjoins, or
:meth:`~.Relationship.Comparator.has` for
more comprehensive non-many-to-one scalar
membership tests.
* Comparisons against ``None`` given in a one-to-many
or many-to-many context produce a NOT EXISTS clause.
"""
if other is None or isinstance(other, expression.Null):
if self.property.direction in [ONETOMANY, MANYTOMANY]:
return ~self._criterion_exists()
else:
return _orm_annotate(
self.property._optimized_compare(
None, adapt_source=self.adapter
)
)
elif self.property.uselist:
raise sa_exc.InvalidRequestError(
"Can't compare a collection to an object or collection; "
"use contains() to test for membership."
)
else:
return _orm_annotate(
self.property._optimized_compare(
other, adapt_source=self.adapter
)
)
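# Behavior sketch for __eq__, assuming a hypothetical many-to-one
# Address.user and one-to-many User.addresses:
#
#     Address.user == some_user   # -> address.user_id = :id
#     Address.user == None        # -> address.user_id IS NULL
#     User.addresses == None      # -> NOT EXISTS (SELECT 1 FROM address
#                                 #    WHERE address.user_id = user.id)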
def _criterion_exists(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> Exists:
where_criteria = (
coercions.expect(roles.WhereHavingRole, criterion)
if criterion is not None
else None
)
if getattr(self, "_of_type", None):
info: Optional[_InternalEntityType[Any]] = inspect(
self._of_type
)
assert info is not None
target_mapper, to_selectable, is_aliased_class = (
info.mapper,
info.selectable,
info.is_aliased_class,
)
if self.property._is_self_referential and not is_aliased_class:
to_selectable = to_selectable._anonymous_fromclause()
single_crit = target_mapper._single_table_criterion
if single_crit is not None:
if where_criteria is not None:
where_criteria = single_crit & where_criteria
else:
where_criteria = single_crit
else:
is_aliased_class = False
to_selectable = None
if self.adapter:
source_selectable = self._source_selectable()
else:
source_selectable = None
(
pj,
sj,
source,
dest,
secondary,
target_adapter,
) = self.property._create_joins(
dest_selectable=to_selectable,
source_selectable=source_selectable,
)
for k in kwargs:
crit = getattr(self.property.mapper.class_, k) == kwargs[k]
if where_criteria is None:
where_criteria = crit
else:
where_criteria = where_criteria & crit
# annotate the *local* side of the join condition, in the case
# of pj + sj this is the full primaryjoin, in the case of just
# pj it's the local side of the primaryjoin.
if sj is not None:
j = _orm_annotate(pj) & sj
else:
j = _orm_annotate(pj, exclude=self.property.remote_side)
if (
where_criteria is not None
and target_adapter
and not is_aliased_class
):
# limit this adapter to annotated only?
where_criteria = target_adapter.traverse(where_criteria)
# only have the "joined left side" of what we
# return be subject to Query adaption. The right
# side of it is used for an exists() subquery and
# should not correlate or otherwise reach out
# to anything in the enclosing query.
if where_criteria is not None:
where_criteria = where_criteria._annotate(
{"no_replacement_traverse": True}
)
crit = j & sql.True_._ifnone(where_criteria)
if secondary is not None:
ex = (
sql.exists(1)
.where(crit)
.select_from(dest, secondary)
.correlate_except(dest, secondary)
)
else:
ex = (
sql.exists(1)
.where(crit)
.select_from(dest)
.correlate_except(dest)
)
return ex
def any(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> ColumnElement[bool]:
"""Produce an expression that tests a collection against
particular criterion, using EXISTS.
An expression like::
session.query(MyClass).filter(
MyClass.somereference.any(SomeRelated.x==2)
)
Will produce a query like::
SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE related.my_id=my_table.id
AND related.x=2)
Because :meth:`~.Relationship.Comparator.any` uses
a correlated subquery, its performance is not nearly as
good when compared against large target tables as that of
using a join.
:meth:`~.Relationship.Comparator.any` is particularly
useful for testing for empty collections::
session.query(MyClass).filter(
~MyClass.somereference.any()
)
will produce::
SELECT * FROM my_table WHERE
NOT (EXISTS (SELECT 1 FROM related WHERE
related.my_id=my_table.id))
:meth:`~.Relationship.Comparator.any` is only
valid for collections, i.e. a :func:`_orm.relationship`
that has ``uselist=True``. For scalar references,
use :meth:`~.Relationship.Comparator.has`.
"""
if not self.property.uselist:
raise sa_exc.InvalidRequestError(
"'any()' not implemented for scalar "
"attributes. Use has()."
)
return self._criterion_exists(criterion, **kwargs)
def has(
self,
criterion: Optional[_ColumnExpressionArgument[bool]] = None,
**kwargs: Any,
) -> ColumnElement[bool]:
"""Produce an expression that tests a scalar reference against
particular criterion, using EXISTS.
An expression like::
session.query(MyClass).filter(
MyClass.somereference.has(SomeRelated.x==2)
)
Will produce a query like::
SELECT * FROM my_table WHERE
EXISTS (SELECT 1 FROM related WHERE
related.id==my_table.related_id AND related.x=2)
Because :meth:`~.Relationship.Comparator.has` uses
a correlated subquery, its performance is not nearly as
good when compared against large target tables as that of
using a join.
:meth:`~.Relationship.Comparator.has` is only
valid for scalar references, i.e. a :func:`_orm.relationship`
that has ``uselist=False``. For collection references,
use :meth:`~.Relationship.Comparator.any`.
"""
if self.property.uselist:
raise sa_exc.InvalidRequestError(
"'has()' not implemented for collections. " "Use any()."
)
return self._criterion_exists(criterion, **kwargs)
def contains(
self, other: _ColumnExpressionArgument[Any], **kwargs: Any
) -> ColumnElement[bool]:
"""Return a simple expression that tests a collection for
containment of a particular item.
:meth:`~.Relationship.Comparator.contains` is
only valid for a collection, i.e. a
:func:`_orm.relationship` that implements
one-to-many or many-to-many with ``uselist=True``.
When used in a simple one-to-many context, an
expression like::
MyClass.contains(other)
Produces a clause like::
mytable.id == <some id>
Where ``<some id>`` is the value of the foreign key
attribute on ``other`` which refers to the primary
key of its parent object. From this it follows that
:meth:`~.Relationship.Comparator.contains` is
very useful when used with simple one-to-many
operations.
For many-to-many operations, the behavior of
:meth:`~.Relationship.Comparator.contains`
has more caveats. The association table will be
rendered in the statement, producing an "implicit"
join, that is, includes multiple tables in the FROM
clause which are equated in the WHERE clause::
query(MyClass).filter(MyClass.contains(other))
Produces a query like::
SELECT * FROM my_table, my_association_table AS
my_association_table_1 WHERE
my_table.id = my_association_table_1.parent_id
AND my_association_table_1.child_id = <some id>
Where ``<some id>`` would be the primary key of
``other``. From the above, it is clear that
:meth:`~.Relationship.Comparator.contains`
will **not** work with many-to-many collections when
used in queries that move beyond simple AND
conjunctions, such as multiple
:meth:`~.Relationship.Comparator.contains`
expressions joined by OR. In such cases subqueries or
explicit "outer joins" will need to be used instead.
See :meth:`~.Relationship.Comparator.any` for
a less-performant alternative using EXISTS, or refer
to :meth:`_query.Query.outerjoin`
as well as :ref:`orm_queryguide_joins`
for more details on constructing outer joins.
kwargs may be ignored by this operator but are required for API
conformance.
"""
if not self.prop.uselist:
raise sa_exc.InvalidRequestError(
"'contains' not implemented for scalar "
"attributes. Use =="
)
clause = self.prop._optimized_compare(
other, adapt_source=self.adapter
)
if self.prop.secondaryjoin is not None:
clause.negation_clause = self.__negated_contains_or_equals(
other
)
return clause
def __negated_contains_or_equals(
self, other: Any
) -> ColumnElement[bool]:
if self.prop.direction == MANYTOONE:
state = attributes.instance_state(other)
def state_bindparam(
local_col: ColumnElement[Any],
state: InstanceState[Any],
remote_col: ColumnElement[Any],
) -> BindParameter[Any]:
dict_ = state.dict
return sql.bindparam(
local_col.key,
type_=local_col.type,
unique=True,
callable_=self.prop._get_attr_w_warn_on_none(
self.prop.mapper, state, dict_, remote_col
),
)
def adapt(col: _CE) -> _CE:
if self.adapter:
return self.adapter(col)
else:
return col
if self.property._use_get:
return sql.and_(
*[
sql.or_(
adapt(x)
!= state_bindparam(adapt(x), state, y),
adapt(x) == None,
)
for (x, y) in self.property.local_remote_pairs
]
)
criterion = sql.and_(
*[
x == y
for (x, y) in zip(
self.property.mapper.primary_key,
self.property.mapper.primary_key_from_instance(other),
)
]
)
return ~self._criterion_exists(criterion)
def __ne__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] # noqa: E501
"""Implement the ``!=`` operator.
In a many-to-one context, such as::
MyClass.some_prop != <some object>
This will typically produce a clause such as::
mytable.related_id != <some id>
Where ``<some id>`` is the primary key of the
given object.
The ``!=`` operator provides partial functionality for non-
many-to-one comparisons:
* Comparisons against collections are not supported.
Use
:meth:`~.Relationship.Comparator.contains`
in conjunction with :func:`_expression.not_`.
* Compared to a scalar one-to-many, will produce a
clause that compares the target columns in the parent to
the given target.
* Compared to a scalar many-to-many, an alias
of the association table will be rendered as
well, forming a natural join that is part of the
main body of the query. This will not work for
queries that go beyond simple AND conjunctions of
comparisons, such as those which use OR. Use
explicit joins, outerjoins, or
:meth:`~.Relationship.Comparator.has` in
conjunction with :func:`_expression.not_` for
more comprehensive non-many-to-one scalar
membership tests.
* Comparisons against ``None`` given in a one-to-many
or many-to-many context produce an EXISTS clause.
"""
if other is None or isinstance(other, expression.Null):
if self.property.direction == MANYTOONE:
return _orm_annotate(
~self.property._optimized_compare(
None, adapt_source=self.adapter
)
)
else:
return self._criterion_exists()
elif self.property.uselist:
raise sa_exc.InvalidRequestError(
"Can't compare a collection"
" to an object or collection; use "
"contains() to test for membership."
)
else:
return _orm_annotate(self.__negated_contains_or_equals(other))
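# Behavior sketch for __ne__, using the same hypothetical User/Address
# mapping as the __eq__ sketch above:
#
#     Address.user != some_user   # -> user_id != :id OR user_id IS NULL
#     Address.user != None        # -> NOT (address.user_id IS NULL)
#     User.addresses != None      # -> EXISTS (SELECT 1 FROM address ...)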
def _memoized_attr_property(self) -> RelationshipProperty[_PT]:
self.prop.parent._check_configure()
return self.prop
def _with_parent(
self,
instance: object,
alias_secondary: bool = True,
from_entity: Optional[_EntityType[Any]] = None,
) -> ColumnElement[bool]:
assert instance is not None
adapt_source: Optional[_CoreAdapterProto] = None
if from_entity is not None:
insp: Optional[_InternalEntityType[Any]] = inspect(from_entity)
assert insp is not None
if insp_is_aliased_class(insp):
adapt_source = insp._adapter.adapt_clause
return self._optimized_compare(
instance,
value_is_parent=True,
adapt_source=adapt_source,
alias_secondary=alias_secondary,
)
def _optimized_compare(
self,
state: Any,
value_is_parent: bool = False,
adapt_source: Optional[_CoreAdapterProto] = None,
alias_secondary: bool = True,
) -> ColumnElement[bool]:
if state is not None:
try:
state = inspect(state)
except sa_exc.NoInspectionAvailable:
state = None
if state is None or not getattr(state, "is_instance", False):
raise sa_exc.ArgumentError(
"Mapped instance expected for relationship "
"comparison to object. Classes, queries and other "
"SQL elements are not accepted in this context; for "
"comparison with a subquery, "
"use %s.has(**criteria)." % self
)
reverse_direction = not value_is_parent
if state is None:
return self._lazy_none_clause(
reverse_direction, adapt_source=adapt_source
)
if not reverse_direction:
criterion, bind_to_col = (
self._lazy_strategy._lazywhere,
self._lazy_strategy._bind_to_col,
)
else:
criterion, bind_to_col = (
self._lazy_strategy._rev_lazywhere,
self._lazy_strategy._rev_bind_to_col,
)
if reverse_direction:
mapper = self.mapper
else:
mapper = self.parent
dict_ = attributes.instance_dict(state.obj())
def visit_bindparam(bindparam: BindParameter[Any]) -> None:
if bindparam._identifying_key in bind_to_col:
bindparam.callable = self._get_attr_w_warn_on_none(
mapper,
state,
dict_,
bind_to_col[bindparam._identifying_key],
)
if self.secondary is not None and alias_secondary:
criterion = ClauseAdapter(
self.secondary._anonymous_fromclause()
).traverse(criterion)
criterion = visitors.cloned_traverse(
criterion, {}, {"bindparam": visit_bindparam}
)
if adapt_source:
criterion = adapt_source(criterion)
return criterion
def _get_attr_w_warn_on_none(
self,
mapper: Mapper[Any],
state: InstanceState[Any],
dict_: _InstanceDict,
column: ColumnElement[Any],
) -> Callable[[], Any]:
"""Create the callable that is used in a many-to-one expression.
E.g.::
u1 = s.query(User).get(5)
expr = Address.user == u1
Above, the SQL should be "address.user_id = 5". The callable
returned by this method produces the value "5" based on the identity
of ``u1``.
"""
# in this callable, we're trying to thread the needle through
# a wide variety of scenarios, including:
#
# * the object hasn't been flushed yet and there's no value for
# the attribute as of yet
#
# * the object hasn't been flushed yet but it has a user-defined
# value
#
# * the object has a value but it's expired and not locally present
#
# * the object has a value but it's expired and not locally present,
# and the object is also detached
#
# * The object hadn't been flushed yet, there was no value, but
# later, the object has been expired and detached, and *now*
# they're trying to evaluate it
#
# * the object had a value, but it was changed to a new value, and
# then expired
#
# * the object had a value, but it was changed to a new value, and
# then expired, then the object was detached
#
# * the object has a user-set value, but it's None and we don't do
# the comparison correctly for that so warn
#
prop = mapper.get_property_by_column(column)
# by invoking this method, InstanceState will track the last known
# value for this key each time the attribute is to be expired.
# this feature was added explicitly for use in this method.
state._track_last_known_value(prop.key)
lkv_fixed = state._last_known_values
def _go() -> Any:
assert lkv_fixed is not None
last_known = to_return = lkv_fixed[prop.key]
existing_is_available = (
last_known is not LoaderCallableStatus.NO_VALUE
)
# we support that the value may have changed. so here we
# try to get the most recent value including re-fetching.
# only if we can't get a value now due to detachment do we return
# the last known value
current_value = mapper._get_state_attr_by_column(
state,
dict_,
column,
passive=PassiveFlag.PASSIVE_OFF
if state.persistent
else PassiveFlag.PASSIVE_NO_FETCH ^ PassiveFlag.INIT_OK,
)
if current_value is LoaderCallableStatus.NEVER_SET:
if not existing_is_available:
raise sa_exc.InvalidRequestError(
"Can't resolve value for column %s on object "
"%s; no value has been set for this column"
% (column, state_str(state))
)
elif current_value is LoaderCallableStatus.PASSIVE_NO_RESULT:
if not existing_is_available:
raise sa_exc.InvalidRequestError(
"Can't resolve value for column %s on object "
"%s; the object is detached and the value was "
"expired" % (column, state_str(state))
)
else:
to_return = current_value
if to_return is None:
util.warn(
"Got None for value of column %s; this is unsupported "
"for a relationship comparison and will not "
"currently produce an IS comparison "
"(but may in a future release)" % column
)
return to_return
return _go
def _lazy_none_clause(
self,
reverse_direction: bool = False,
adapt_source: Optional[_CoreAdapterProto] = None,
) -> ColumnElement[bool]:
if not reverse_direction:
criterion, bind_to_col = (
self._lazy_strategy._lazywhere,
self._lazy_strategy._bind_to_col,
)
else:
criterion, bind_to_col = (
self._lazy_strategy._rev_lazywhere,
self._lazy_strategy._rev_bind_to_col,
)
criterion = adapt_criterion_to_null(criterion, bind_to_col)
if adapt_source:
criterion = adapt_source(criterion)
return criterion
def __str__(self) -> str:
return str(self.parent.class_.__name__) + "." + self.key
def merge(
self,
session: Session,
source_state: InstanceState[Any],
source_dict: _InstanceDict,
dest_state: InstanceState[Any],
dest_dict: _InstanceDict,
load: bool,
_recursive: Dict[Any, object],
_resolve_conflict_map: Dict[_IdentityKeyType[Any], object],
) -> None:
if load:
for r in self._reverse_property:
if (source_state, r) in _recursive:
return
if "merge" not in self._cascade:
return
if self.key not in source_dict:
return
if self.uselist:
impl = source_state.get_impl(self.key)
assert is_has_collection_adapter(impl)
instances_iterable = impl.get_collection(source_state, source_dict)
# if this is a CollectionAttributeImpl, then empty should
# be False, otherwise "self.key in source_dict" should not be
# True
assert not instances_iterable.empty if impl.collection else True
if load:
# for a full merge, pre-load the destination collection,
# so that individual _merge of each item pulls from identity
# map for those already present.
# also assumes CollectionAttributeImpl behavior of loading
# "old" list in any case
dest_state.get_impl(self.key).get(
dest_state, dest_dict, passive=PassiveFlag.PASSIVE_MERGE
)
dest_list = []
for current in instances_iterable:
current_state = attributes.instance_state(current)
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(
current_state,
current_dict,
load=load,
_recursive=_recursive,
_resolve_conflict_map=_resolve_conflict_map,
)
if obj is not None:
dest_list.append(obj)
if not load:
coll = attributes.init_state_collection(
dest_state, dest_dict, self.key
)
for c in dest_list:
coll.append_without_event(c)
else:
dest_impl = dest_state.get_impl(self.key)
assert is_has_collection_adapter(dest_impl)
dest_impl.set(
dest_state,
dest_dict,
dest_list,
_adapt=False,
passive=PassiveFlag.PASSIVE_MERGE,
)
else:
current = source_dict[self.key]
if current is not None:
current_state = attributes.instance_state(current)
current_dict = attributes.instance_dict(current)
_recursive[(current_state, self)] = True
obj = session._merge(
current_state,
current_dict,
load=load,
_recursive=_recursive,
_resolve_conflict_map=_resolve_conflict_map,
)
else:
obj = None
if not load:
dest_dict[self.key] = obj
else:
dest_state.get_impl(self.key).set(
dest_state, dest_dict, obj, None
)
def _value_as_iterable(
self,
state: InstanceState[_O],
dict_: _InstanceDict,
key: str,
passive: PassiveFlag = PassiveFlag.PASSIVE_OFF,
) -> Sequence[Tuple[InstanceState[_O], _O]]:
"""Return a list of tuples (state, obj) for the given
key.
Returns an empty list if the value is None/empty/PASSIVE_NO_RESULT
"""
impl = state.manager[key].impl
x = impl.get(state, dict_, passive=passive)
if x is LoaderCallableStatus.PASSIVE_NO_RESULT or x is None:
return []
elif is_has_collection_adapter(impl):
return [
(attributes.instance_state(o), o)
for o in impl.get_collection(state, dict_, x, passive=passive)
]
else:
return [(attributes.instance_state(x), x)]
def cascade_iterator(
self,
type_: str,
state: InstanceState[Any],
dict_: _InstanceDict,
visited_states: Set[InstanceState[Any]],
halt_on: Optional[Callable[[InstanceState[Any]], bool]] = None,
) -> Iterator[Tuple[Any, Mapper[Any], InstanceState[Any], _InstanceDict]]:
# assert type_ in self._cascade
# only actively lazy load on the 'delete' cascade
if type_ != "delete" or self.passive_deletes:
passive = PassiveFlag.PASSIVE_NO_INITIALIZE
else:
passive = PassiveFlag.PASSIVE_OFF | PassiveFlag.NO_RAISE
if type_ == "save-update":
tuples = state.manager[self.key].impl.get_all_pending(state, dict_)
else:
tuples = self._value_as_iterable(
state, dict_, self.key, passive=passive
)
skip_pending = (
type_ == "refresh-expire" and "delete-orphan" not in self._cascade
)
for instance_state, c in tuples:
if instance_state in visited_states:
continue
if c is None:
# would like to emit a warning here, but
# would not be consistent with collection.append(None)
# current behavior of silently skipping.
# see [ticket:2229]
continue
assert instance_state is not None
instance_dict = attributes.instance_dict(c)
if halt_on and halt_on(instance_state):
continue
if skip_pending and not instance_state.key:
continue
instance_mapper = instance_state.manager.mapper
if not instance_mapper.isa(self.mapper.class_manager.mapper):
raise AssertionError(
"Attribute '%s' on class '%s' "
"doesn't handle objects "
"of type '%s'"
% (self.key, self.parent.class_, c.__class__)
)
visited_states.add(instance_state)
yield c, instance_mapper, instance_state, instance_dict
@property
def _effective_sync_backref(self) -> bool:
if self.viewonly:
return False
else:
return self.sync_backref is not False
@staticmethod
def _check_sync_backref(
rel_a: RelationshipProperty[Any], rel_b: RelationshipProperty[Any]
) -> None:
if rel_a.viewonly and rel_b.sync_backref:
raise sa_exc.InvalidRequestError(
"Relationship %s cannot specify sync_backref=True since %s "
"includes viewonly=True." % (rel_b, rel_a)
)
if (
rel_a.viewonly
and not rel_b.viewonly
and rel_b.sync_backref is not False
):
rel_b.sync_backref = False
def _add_reverse_property(self, key: str) -> None:
other = self.mapper.get_property(key, _configure_mappers=False)
if not isinstance(other, RelationshipProperty):
raise sa_exc.InvalidRequestError(
"back_populates on relationship '%s' refers to attribute '%s' "
"that is not a relationship. The back_populates parameter "
"should refer to the name of a relationship on the target "
"class." % (self, other)
)
# viewonly and sync_backref cases
# 1. self.viewonly==True and other.sync_backref==True -> error
# 2. self.viewonly==True and other.viewonly==False and
# other.sync_backref==None -> warn sync_backref=False, set to False
self._check_sync_backref(self, other)
# 3. other.viewonly==True and self.sync_backref==True -> error
# 4. other.viewonly==True and self.viewonly==False and
# self.sync_backref==None -> warn sync_backref=False, set to False
self._check_sync_backref(other, self)
self._reverse_property.add(other)
other._reverse_property.add(self)
other._setup_entity()
if not other.mapper.common_parent(self.parent):
raise sa_exc.ArgumentError(
"reverse_property %r on "
"relationship %s references relationship %s, which "
"does not reference mapper %s"
% (key, self, other, self.parent)
)
if (
other._configure_started
and self.direction in (ONETOMANY, MANYTOONE)
and self.direction == other.direction
):
raise sa_exc.ArgumentError(
"%s and back-reference %s are "
"both of the same direction %r. Did you mean to "
"set remote_side on the many-to-one side ?"
% (other, self, self.direction)
)
@util.memoized_property
def entity(self) -> _InternalEntityType[_T]:
"""Return the target mapped entity, which is an inspect() of the
class or aliased class that is referred towards.
"""
self.parent._check_configure()
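# note: _check_configure() triggers mapper configuration, during which
# _setup_entity() assigns the concrete entity into self.__dict__;
# re-reading self.entity below therefore returns that configured value
# rather than recursing into this memoized property.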
return self.entity
@util.memoized_property
def mapper(self) -> Mapper[_T]:
"""Return the targeted :class:`_orm.Mapper` for this
:class:`.RelationshipProperty`.
"""
return self.entity.mapper
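# Introspection sketch: on a configured mapping these accessors are
# reachable as, e.g. (hypothetical User.addresses relationship):
#
#     rel = inspect(User).relationships["addresses"]  # RelationshipProperty
#     rel.entity   # Mapper (or AliasedInsp) for Address
#     rel.mapper   # the Address Mapper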
def do_init(self) -> None:
self._check_conflicts()
self._process_dependent_arguments()
self._setup_entity()
self._setup_registry_dependencies()
self._setup_join_conditions()
self._check_cascade_settings(self._cascade)
self._post_init()
self._generate_backref()
self._join_condition._warn_for_conflicting_sync_targets()
super().do_init()
self._lazy_strategy = cast(
"LazyLoader", self._get_strategy((("lazy", "select"),))
)
def _setup_registry_dependencies(self) -> None:
self.parent.mapper.registry._set_depends_on(
self.entity.mapper.registry
)
def _process_dependent_arguments(self) -> None:
"""Convert incoming configuration arguments to their
proper form.
Callables are resolved, ORM annotations removed.
"""
# accept callables for other attributes which may require
# deferred initialization. This technique is used
# by declarative "string configs" and some recipes.
init_args = self._init_args
for attr in (
"order_by",
"primaryjoin",
"secondaryjoin",
"secondary",
"foreign_keys",
"remote_side",
):
rel_arg = getattr(init_args, attr)
rel_arg._resolve_against_registry(self._clsregistry_resolvers[1])
# remove "annotations" which are present if mapped class
# descriptors are used to create the join expression.
for attr in "primaryjoin", "secondaryjoin":
rel_arg = getattr(init_args, attr)
val = rel_arg.resolved
if val is not None:
rel_arg.resolved = _orm_deannotate(
coercions.expect(
roles.ColumnArgumentRole, val, argname=attr
)
)
secondary = init_args.secondary.resolved
if secondary is not None and _is_mapped_class(secondary):
raise sa_exc.ArgumentError(
"secondary argument %s passed to to relationship() %s must "
"be a Table object or other FROM clause; can't send a mapped "
"class directly as rows in 'secondary' are persisted "
"independently of a class that is mapped "
"to that same table." % (secondary, self)
)
# ensure expressions in self.order_by, foreign_keys,
# remote_side are all columns, not strings.
if (
init_args.order_by.resolved is not False
and init_args.order_by.resolved is not None
):
self.order_by = tuple(
coercions.expect(
roles.ColumnArgumentRole, x, argname="order_by"
)
for x in util.to_list(init_args.order_by.resolved)
)
else:
self.order_by = False
self._user_defined_foreign_keys = util.column_set(
coercions.expect(
roles.ColumnArgumentRole, x, argname="foreign_keys"
)
for x in util.to_column_set(init_args.foreign_keys.resolved)
)
self.remote_side = util.column_set(
coercions.expect(
roles.ColumnArgumentRole, x, argname="remote_side"
)
for x in util.to_column_set(init_args.remote_side.resolved)
)
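# The 'secondary' constraint enforced above, sketched with hypothetical
# names: a Table (or other FROM clause) is accepted, a mapped class is
# not:
#
#     association = Table(
#         "association", Base.metadata,
#         Column("left_id", ForeignKey("left.id"), primary_key=True),
#         Column("right_id", ForeignKey("right.id"), primary_key=True),
#     )
#
#     children = relationship("Child", secondary=association)   # ok
#     children = relationship("Child", secondary=ChildAssoc)    # ArgumentError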
def declarative_scan(
self,
decl_scan: _ClassScanMapperConfig,
registry: _RegistryType,
cls: Type[Any],
originating_module: Optional[str],
key: str,
mapped_container: Optional[Type[Mapped[Any]]],
annotation: Optional[_AnnotationScanType],
extracted_mapped_annotation: Optional[_AnnotationScanType],
is_dataclass_field: bool,
) -> None:
argument = extracted_mapped_annotation
if extracted_mapped_annotation is None:
if self.argument is None:
self._raise_for_required(key, cls)
else:
return
argument = extracted_mapped_annotation
assert originating_module is not None
is_write_only = mapped_container is not None and issubclass(
mapped_container, WriteOnlyMapped
)
if is_write_only:
self.lazy = "write_only"
self.strategy_key = (("lazy", self.lazy),)
is_dynamic = mapped_container is not None and issubclass(
mapped_container, DynamicMapped
)
if is_dynamic:
self.lazy = "dynamic"
self.strategy_key = (("lazy", self.lazy),)
argument = de_optionalize_union_types(argument)
if hasattr(argument, "__origin__"):
arg_origin = argument.__origin__ # type: ignore
if isinstance(arg_origin, type) and issubclass(
arg_origin, abc.Collection
):
if self.collection_class is None:
if _py_inspect.isabstract(arg_origin):
raise sa_exc.ArgumentError(
f"Collection annotation type {arg_origin} cannot "
"be instantiated; please provide an explicit "
"'collection_class' parameter "
"(e.g. list, set, etc.) to the "
"relationship() function to accompany this "
"annotation"
)
self.collection_class = arg_origin
elif not is_write_only and not is_dynamic:
self.uselist = False
if argument.__args__: # type: ignore
if isinstance(arg_origin, type) and issubclass(
arg_origin, typing.Mapping # type: ignore
):
type_arg = argument.__args__[-1] # type: ignore
else:
type_arg = argument.__args__[0] # type: ignore
if hasattr(type_arg, "__forward_arg__"):
str_argument = type_arg.__forward_arg__
argument = resolve_name_to_real_class_name(
str_argument, originating_module
)
else:
argument = type_arg
else:
raise sa_exc.ArgumentError(
f"Generic alias {argument} requires an argument"
)
elif hasattr(argument, "__forward_arg__"):
argument = argument.__forward_arg__ # type: ignore
argument = resolve_name_to_real_class_name(
argument, originating_module
)
# we don't allow the collection class to be a
# __forward_arg__ right now, so if we see a forward arg here,
# we know there was no collection class either
if (
self.collection_class is None
and not is_write_only
and not is_dynamic
):
self.uselist = False
# ticket #8759
# if a lead argument was given to relationship(), like
# `relationship("B")`, use that, don't replace it with class we
# found in the annotation. The declarative_scan() method call here is
# still useful, as we continue to derive collection type and do
# checking of the annotation in any case.
if self.argument is None:
self.argument = cast("_RelationshipArgumentType[_T]", argument)
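# Illustrative sketch (hypothetical models): the annotation scan above is
# what drives configuration from typing information under Declarative:
#
#     class Parent(Base):
#         __tablename__ = "parent"
#         id: Mapped[int] = mapped_column(primary_key=True)
#         children: Mapped[List["Child"]] = relationship()
#
# List["Child"] supplies collection_class=list and the forward reference
# "Child" as the target; a plain Mapped["Child"] sets uselist=False, and
# WriteOnlyMapped["Child"] / DynamicMapped["Child"] select the
# "write_only" / "dynamic" lazy strategies handled earlier in this method.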
@util.preload_module("sqlalchemy.orm.mapper")
def _setup_entity(self, __argument: Any = None) -> None:
if "entity" in self.__dict__:
return
mapperlib = util.preloaded.orm_mapper
if __argument:
argument = __argument
else:
argument = self.argument
resolved_argument: _ExternalEntityType[Any]
if isinstance(argument, str):
# we might want to cleanup clsregistry API to make this
# more straightforward
resolved_argument = cast(
"_ExternalEntityType[Any]",
self._clsregistry_resolve_name(argument)(),
)
elif callable(argument) and not isinstance(
argument, (type, mapperlib.Mapper)
):
resolved_argument = argument()
else:
resolved_argument = argument
entity: _InternalEntityType[Any]
if isinstance(resolved_argument, type):
entity = class_mapper(resolved_argument, configure=False)
else:
try:
entity = inspect(resolved_argument)
except sa_exc.NoInspectionAvailable:
entity = None # type: ignore
if not hasattr(entity, "mapper"):
raise sa_exc.ArgumentError(
"relationship '%s' expects "
"a class or a mapper argument (received: %s)"
% (self.key, type(resolved_argument))
)
self.entity = entity # type: ignore
self.target = self.entity.persist_selectable
def _setup_join_conditions(self) -> None:
self._join_condition = jc = JoinCondition(
parent_persist_selectable=self.parent.persist_selectable,
child_persist_selectable=self.entity.persist_selectable,
parent_local_selectable=self.parent.local_table,
child_local_selectable=self.entity.local_table,
primaryjoin=self._init_args.primaryjoin.resolved,
secondary=self._init_args.secondary.resolved,
secondaryjoin=self._init_args.secondaryjoin.resolved,
parent_equivalents=self.parent._equivalent_columns,
child_equivalents=self.mapper._equivalent_columns,
consider_as_foreign_keys=self._user_defined_foreign_keys,
local_remote_pairs=self.local_remote_pairs,
remote_side=self.remote_side,
self_referential=self._is_self_referential,
prop=self,
support_sync=not self.viewonly,
can_be_synced_fn=self._columns_are_mapped,
)
self.primaryjoin = jc.primaryjoin
self.secondaryjoin = jc.secondaryjoin
self.secondary = jc.secondary
self.direction = jc.direction
self.local_remote_pairs = jc.local_remote_pairs
self.remote_side = jc.remote_columns
self.local_columns = jc.local_columns
self.synchronize_pairs = jc.synchronize_pairs
self._calculated_foreign_keys = jc.foreign_key_columns
self.secondary_synchronize_pairs = jc.secondary_synchronize_pairs
@property
def _clsregistry_resolve_arg(
self,
) -> Callable[[str, bool], _class_resolver]:
return self._clsregistry_resolvers[1]
@property
def _clsregistry_resolve_name(
self,
) -> Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]]:
return self._clsregistry_resolvers[0]
@util.memoized_property
@util.preload_module("sqlalchemy.orm.clsregistry")
def _clsregistry_resolvers(
self,
) -> Tuple[
Callable[[str], Callable[[], Union[Type[Any], Table, _ModNS]]],
Callable[[str, bool], _class_resolver],
]:
_resolver = util.preloaded.orm_clsregistry._resolver
return _resolver(self.parent.class_, self)
def _check_conflicts(self) -> None:
"""Test that this relationship is legal, warn about
inheritance conflicts."""
if self.parent.non_primary and not class_mapper(
self.parent.class_, configure=False
).has_property(self.key):
raise sa_exc.ArgumentError(
"Attempting to assign a new "
"relationship '%s' to a non-primary mapper on "
"class '%s'. New relationships can only be added "
"to the primary mapper, i.e. the very first mapper "
"created for class '%s' "
% (
self.key,
self.parent.class_.__name__,
self.parent.class_.__name__,
)
)
@property
def cascade(self) -> CascadeOptions:
"""Return the current cascade setting for this
:class:`.RelationshipProperty`.
"""
return self._cascade
@cascade.setter
def cascade(self, cascade: Union[str, CascadeOptions]) -> None:
self._set_cascade(cascade)
def _set_cascade(self, cascade_arg: Union[str, CascadeOptions]) -> None:
cascade = CascadeOptions(cascade_arg)
if self.viewonly:
cascade = CascadeOptions(
cascade.intersection(CascadeOptions._viewonly_cascades)
)
if "mapper" in self.__dict__:
self._check_cascade_settings(cascade)
self._cascade = cascade
if self._dependency_processor:
self._dependency_processor.cascade = cascade
def _check_cascade_settings(self, cascade: CascadeOptions) -> None:
if (
cascade.delete_orphan
and not self.single_parent
and (self.direction is MANYTOMANY or self.direction is MANYTOONE)
):
raise sa_exc.ArgumentError(
"For %(direction)s relationship %(rel)s, delete-orphan "
"cascade is normally "
'configured only on the "one" side of a one-to-many '
"relationship, "
'and not on the "many" side of a many-to-one or many-to-many '
"relationship. "
"To force this relationship to allow a particular "
'"%(relatedcls)s" object to be referred towards by only '
'a single "%(clsname)s" object at a time via the '
"%(rel)s relationship, which "
"would allow "
"delete-orphan cascade to take place in this direction, set "
"the single_parent=True flag."
% {
"rel": self,
"direction": "many-to-one"
if self.direction is MANYTOONE
else "many-to-many",
"clsname": self.parent.class_.__name__,
"relatedcls": self.mapper.class_.__name__,
},
code="bbf0",
)
if self.passive_deletes == "all" and (
"delete" in cascade or "delete-orphan" in cascade
):
raise sa_exc.ArgumentError(
"On %s, can't set passive_deletes='all' in conjunction "
"with 'delete' or 'delete-orphan' cascade" % self
)
if cascade.delete_orphan:
self.mapper.primary_mapper()._delete_orphans.append(
(self.key, self.parent.class_)
)
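# Illustrative sketch (hypothetical models): the delete-orphan check above
# rejects a bare many-to-one cascade; the documented escape hatch is
# single_parent=True:
#
#     parent = relationship(
#         "Parent",
#         cascade="all, delete-orphan",
#         single_parent=True,   # required on the "many" side
#     )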
def _persists_for(self, mapper: Mapper[Any]) -> bool:
"""Return True if this property will persist values on behalf
of the given mapper.
"""
return (
self.key in mapper.relationships
and mapper.relationships[self.key] is self
)
def _columns_are_mapped(self, *cols: ColumnElement[Any]) -> bool:
"""Return True if all columns in the given collection are
mapped by the tables referenced by this :class:`.RelationshipProperty`.
"""
secondary = self._init_args.secondary.resolved
for c in cols:
if secondary is not None and secondary.c.contains_column(c):
continue
if not self.parent.persist_selectable.c.contains_column(
c
) and not self.target.c.contains_column(c):
return False
return True
def _generate_backref(self) -> None:
"""Interpret the 'backref' instruction to create a
:func:`_orm.relationship` complementary to this one."""
if self.parent.non_primary:
return
if self.backref is not None and not self.back_populates:
kwargs: Dict[str, Any]
if isinstance(self.backref, str):
backref_key, kwargs = self.backref, {}
else:
backref_key, kwargs = self.backref
mapper = self.mapper.primary_mapper()
if not mapper.concrete:
check = set(mapper.iterate_to_root()).union(
mapper.self_and_descendants
)
for m in check:
if m.has_property(backref_key) and not m.concrete:
raise sa_exc.ArgumentError(
"Error creating backref "
"'%s' on relationship '%s': property of that "
"name exists on mapper '%s'"
% (backref_key, self, m)
)
# determine primaryjoin/secondaryjoin for the
# backref. Use the one we had, so that
# a custom join doesn't have to be specified in
# both directions.
if self.secondary is not None:
# for many to many, just switch primaryjoin/
# secondaryjoin. use the annotated
# pj/sj on the _join_condition.
pj = kwargs.pop(
"primaryjoin",
self._join_condition.secondaryjoin_minus_local,
)
sj = kwargs.pop(
"secondaryjoin",
self._join_condition.primaryjoin_minus_local,
)
else:
pj = kwargs.pop(
"primaryjoin",
self._join_condition.primaryjoin_reverse_remote,
)
sj = kwargs.pop("secondaryjoin", None)
if sj:
raise sa_exc.InvalidRequestError(
"Can't assign 'secondaryjoin' on a backref "
"against a non-secondary relationship."
)
foreign_keys = kwargs.pop(
"foreign_keys", self._user_defined_foreign_keys
)
parent = self.parent.primary_mapper()
kwargs.setdefault("viewonly", self.viewonly)
kwargs.setdefault("post_update", self.post_update)
kwargs.setdefault("passive_updates", self.passive_updates)
kwargs.setdefault("sync_backref", self.sync_backref)
self.back_populates = backref_key
relationship = RelationshipProperty(
parent,
self.secondary,
primaryjoin=pj,
secondaryjoin=sj,
foreign_keys=foreign_keys,
back_populates=self.key,
**kwargs,
)
mapper._configure_property(
backref_key, relationship, warn_for_existing=True
)
if self.back_populates:
self._add_reverse_property(self.back_populates)
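# Illustrative sketch: the expansion above is why a backref such as
#
#     addresses = relationship("Address", backref="user")
#
# behaves like the explicit back_populates pair, with the existing
# primaryjoin/secondaryjoin reused (swapped for many-to-many, reversed
# for the remote annotations) instead of being re-derived for the
# generated direction.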
@util.preload_module("sqlalchemy.orm.dependency")
def _post_init(self) -> None:
dependency = util.preloaded.orm_dependency
if self.uselist is None:
self.uselist = self.direction is not MANYTOONE
if not self.viewonly:
self._dependency_processor = ( # type: ignore
dependency.DependencyProcessor.from_relationship
)(self)
@util.memoized_property
def _use_get(self) -> bool:
"""memoize the 'use_get' attribute of this RelationshipLoader's
lazyloader."""
strategy = self._lazy_strategy
return strategy.use_get
@util.memoized_property
def _is_self_referential(self) -> bool:
return self.mapper.common_parent(self.parent)
def _create_joins(
self,
source_polymorphic: bool = False,
source_selectable: Optional[FromClause] = None,
dest_selectable: Optional[FromClause] = None,
of_type_entity: Optional[_InternalEntityType[Any]] = None,
alias_secondary: bool = False,
extra_criteria: Tuple[ColumnElement[bool], ...] = (),
) -> Tuple[
ColumnElement[bool],
Optional[ColumnElement[bool]],
FromClause,
FromClause,
Optional[FromClause],
Optional[ClauseAdapter],
]:
aliased = False
if alias_secondary and self.secondary is not None:
aliased = True
if source_selectable is None:
if source_polymorphic and self.parent.with_polymorphic:
source_selectable = self.parent._with_polymorphic_selectable
if of_type_entity:
dest_mapper = of_type_entity.mapper
if dest_selectable is None:
dest_selectable = of_type_entity.selectable
aliased = True
else:
dest_mapper = self.mapper
if dest_selectable is None:
dest_selectable = self.entity.selectable
if self.mapper.with_polymorphic:
aliased = True
if self._is_self_referential and source_selectable is None:
dest_selectable = dest_selectable._anonymous_fromclause()
aliased = True
elif (
dest_selectable is not self.mapper._with_polymorphic_selectable
or self.mapper.with_polymorphic
):
aliased = True
single_crit = dest_mapper._single_table_criterion
aliased = aliased or (
source_selectable is not None
and (
source_selectable
is not self.parent._with_polymorphic_selectable
or source_selectable._is_subquery
)
)
(
primaryjoin,
secondaryjoin,
secondary,
target_adapter,
dest_selectable,
) = self._join_condition.join_targets(
source_selectable,
dest_selectable,
aliased,
single_crit,
extra_criteria,
)
if source_selectable is None:
source_selectable = self.parent.local_table
if dest_selectable is None:
dest_selectable = self.entity.local_table
return (
primaryjoin,
secondaryjoin,
source_selectable,
dest_selectable,
secondary,
target_adapter,
)
def _annotate_columns(element: _CE, annotations: _AnnotationDict) -> _CE:
def clone(elem: _CE) -> _CE:
if isinstance(elem, expression.ColumnClause):
elem = elem._annotate(annotations.copy()) # type: ignore
elem._copy_internals(clone=clone)
return elem
if element is not None:
element = clone(element)
clone = None # type: ignore # remove gc cycles
return element
class JoinCondition:
primaryjoin_initial: Optional[ColumnElement[bool]]
primaryjoin: ColumnElement[bool]
secondaryjoin: Optional[ColumnElement[bool]]
secondary: Optional[FromClause]
prop: RelationshipProperty[Any]
synchronize_pairs: _ColumnPairs
secondary_synchronize_pairs: _ColumnPairs
direction: RelationshipDirection
parent_persist_selectable: FromClause
child_persist_selectable: FromClause
parent_local_selectable: FromClause
child_local_selectable: FromClause
_local_remote_pairs: Optional[_ColumnPairs]
def __init__(
self,
parent_persist_selectable: FromClause,
child_persist_selectable: FromClause,
parent_local_selectable: FromClause,
child_local_selectable: FromClause,
*,
primaryjoin: Optional[ColumnElement[bool]] = None,
secondary: Optional[FromClause] = None,
secondaryjoin: Optional[ColumnElement[bool]] = None,
parent_equivalents: Optional[_EquivalentColumnMap] = None,
child_equivalents: Optional[_EquivalentColumnMap] = None,
consider_as_foreign_keys: Any = None,
local_remote_pairs: Optional[_ColumnPairs] = None,
remote_side: Any = None,
self_referential: Any = False,
prop: RelationshipProperty[Any],
support_sync: bool = True,
can_be_synced_fn: Callable[..., bool] = lambda *c: True,
):
self.parent_persist_selectable = parent_persist_selectable
self.parent_local_selectable = parent_local_selectable
self.child_persist_selectable = child_persist_selectable
self.child_local_selectable = child_local_selectable
self.parent_equivalents = parent_equivalents
self.child_equivalents = child_equivalents
self.primaryjoin_initial = primaryjoin
self.secondaryjoin = secondaryjoin
self.secondary = secondary
self.consider_as_foreign_keys = consider_as_foreign_keys
self._local_remote_pairs = local_remote_pairs
self._remote_side = remote_side
self.prop = prop
self.self_referential = self_referential
self.support_sync = support_sync
self.can_be_synced_fn = can_be_synced_fn
self._determine_joins()
assert self.primaryjoin is not None
self._sanitize_joins()
self._annotate_fks()
self._annotate_remote()
self._annotate_local()
self._annotate_parentmapper()
self._setup_pairs()
self._check_foreign_cols(self.primaryjoin, True)
if self.secondaryjoin is not None:
self._check_foreign_cols(self.secondaryjoin, False)
self._determine_direction()
self._check_remote_side()
self._log_joins()
def _log_joins(self) -> None:
log = self.prop.logger
log.info("%s setup primary join %s", self.prop, self.primaryjoin)
log.info("%s setup secondary join %s", self.prop, self.secondaryjoin)
log.info(
"%s synchronize pairs [%s]",
self.prop,
",".join(
"(%s => %s)" % (l, r) for (l, r) in self.synchronize_pairs
),
)
log.info(
"%s secondary synchronize pairs [%s]",
self.prop,
",".join(
"(%s => %s)" % (l, r)
for (l, r) in self.secondary_synchronize_pairs or []
),
)
log.info(
"%s local/remote pairs [%s]",
self.prop,
",".join(
"(%s / %s)" % (l, r) for (l, r) in self.local_remote_pairs
),
)
log.info(
"%s remote columns [%s]",
self.prop,
",".join("%s" % col for col in self.remote_columns),
)
log.info(
"%s local columns [%s]",
self.prop,
",".join("%s" % col for col in self.local_columns),
)
log.info("%s relationship direction %s", self.prop, self.direction)
def _sanitize_joins(self) -> None:
"""remove the parententity annotation from our join conditions which
can leak in here based on some declarative patterns and maybe others.
"parentmapper" is relied upon both by the ORM evaluator as well as
the use case in _join_fixture_inh_selfref_w_entity
that relies upon it being present, see :ticket:`3364`.
"""
self.primaryjoin = _deep_deannotate(
self.primaryjoin, values=("parententity", "proxy_key")
)
if self.secondaryjoin is not None:
self.secondaryjoin = _deep_deannotate(
self.secondaryjoin, values=("parententity", "proxy_key")
)
def _determine_joins(self) -> None:
"""Determine the 'primaryjoin' and 'secondaryjoin' attributes,
if not passed to the constructor already.
This is based on analysis of the foreign key relationships
between the parent and target mapped selectables.
"""
if self.secondaryjoin is not None and self.secondary is None:
raise sa_exc.ArgumentError(
"Property %s specified with secondary "
"join condition but "
"no secondary argument" % self.prop
)
# find a join between the given mapper's mapped table and
# the given table. will try the mapper's local table first
# for more specificity, then if not found will try the more
# general mapped table, which in the case of inheritance is
# a join.
try:
consider_as_foreign_keys = self.consider_as_foreign_keys or None
if self.secondary is not None:
if self.secondaryjoin is None:
self.secondaryjoin = join_condition(
self.child_persist_selectable,
self.secondary,
a_subset=self.child_local_selectable,
consider_as_foreign_keys=consider_as_foreign_keys,
)
if self.primaryjoin_initial is None:
self.primaryjoin = join_condition(
self.parent_persist_selectable,
self.secondary,
a_subset=self.parent_local_selectable,
consider_as_foreign_keys=consider_as_foreign_keys,
)
else:
self.primaryjoin = self.primaryjoin_initial
else:
if self.primaryjoin_initial is None:
self.primaryjoin = join_condition(
self.parent_persist_selectable,
self.child_persist_selectable,
a_subset=self.parent_local_selectable,
consider_as_foreign_keys=consider_as_foreign_keys,
)
else:
self.primaryjoin = self.primaryjoin_initial
except sa_exc.NoForeignKeysError as nfe:
if self.secondary is not None:
raise sa_exc.NoForeignKeysError(
"Could not determine join "
"condition between parent/child tables on "
"relationship %s - there are no foreign keys "
"linking these tables via secondary table '%s'. "
"Ensure that referencing columns are associated "
"with a ForeignKey or ForeignKeyConstraint, or "
"specify 'primaryjoin' and 'secondaryjoin' "
"expressions." % (self.prop, self.secondary)
) from nfe
else:
raise sa_exc.NoForeignKeysError(
"Could not determine join "
"condition between parent/child tables on "
"relationship %s - there are no foreign keys "
"linking these tables. "
"Ensure that referencing columns are associated "
"with a ForeignKey or ForeignKeyConstraint, or "
"specify a 'primaryjoin' expression." % self.prop
) from nfe
except sa_exc.AmbiguousForeignKeysError as afe:
if self.secondary is not None:
raise sa_exc.AmbiguousForeignKeysError(
"Could not determine join "
"condition between parent/child tables on "
"relationship %s - there are multiple foreign key "
"paths linking the tables via secondary table '%s'. "
"Specify the 'foreign_keys' "
"argument, providing a list of those columns which "
"should be counted as containing a foreign key "
"reference from the secondary table to each of the "
"parent and child tables." % (self.prop, self.secondary)
) from afe
else:
raise sa_exc.AmbiguousForeignKeysError(
"Could not determine join "
"condition between parent/child tables on "
"relationship %s - there are multiple foreign key "
"paths linking the tables. Specify the "
"'foreign_keys' argument, providing a list of those "
"columns which should be counted as containing a "
"foreign key reference to the parent table." % self.prop
) from afe
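# Illustrative sketch (hypothetical schema): the NoForeignKeysError paths
# above are typically resolved either by declaring a real constraint,
#
#     parent_id = mapped_column(ForeignKey("parent.id"))
#
# or, when no constraint can exist, by spelling the join out explicitly:
#
#     children = relationship(
#         "Child",
#         primaryjoin="Parent.id == foreign(Child.parent_id)",
#         viewonly=True,
#     )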
@property
def primaryjoin_minus_local(self) -> ColumnElement[bool]:
return _deep_deannotate(self.primaryjoin, values=("local", "remote"))
@property
def secondaryjoin_minus_local(self) -> ColumnElement[bool]:
assert self.secondaryjoin is not None
return _deep_deannotate(self.secondaryjoin, values=("local", "remote"))
@util.memoized_property
def primaryjoin_reverse_remote(self) -> ColumnElement[bool]:
"""Return the primaryjoin condition suitable for the
"reverse" direction.
If the primaryjoin was delivered here with pre-existing
"remote" annotations, the local/remote annotations
are reversed. Otherwise, the local/remote annotations
are removed.
"""
if self._has_remote_annotations:
def replace(element: _CE, **kw: Any) -> Optional[_CE]:
if "remote" in element._annotations:
v = dict(element._annotations)
del v["remote"]
v["local"] = True
return element._with_annotations(v)
elif "local" in element._annotations:
v = dict(element._annotations)
del v["local"]
v["remote"] = True
return element._with_annotations(v)
return None
return visitors.replacement_traverse(self.primaryjoin, {}, replace)
else:
if self._has_foreign_annotations:
# TODO: coverage
return _deep_deannotate(
self.primaryjoin, values=("local", "remote")
)
else:
return _deep_deannotate(self.primaryjoin)
def _has_annotation(self, clause: ClauseElement, annotation: str) -> bool:
for col in visitors.iterate(clause, {}):
if annotation in col._annotations:
return True
else:
return False
@util.memoized_property
def _has_foreign_annotations(self) -> bool:
return self._has_annotation(self.primaryjoin, "foreign")
@util.memoized_property
def _has_remote_annotations(self) -> bool:
return self._has_annotation(self.primaryjoin, "remote")
def _annotate_fks(self) -> None:
"""Annotate the primaryjoin and secondaryjoin
structures with 'foreign' annotations marking columns
considered as foreign.
"""
if self._has_foreign_annotations:
return
if self.consider_as_foreign_keys:
self._annotate_from_fk_list()
else:
self._annotate_present_fks()
def _annotate_from_fk_list(self) -> None:
def check_fk(element: _CE, **kw: Any) -> Optional[_CE]:
if element in self.consider_as_foreign_keys:
return element._annotate({"foreign": True})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, check_fk
)
if self.secondaryjoin is not None:
self.secondaryjoin = visitors.replacement_traverse(
self.secondaryjoin, {}, check_fk
)
def _annotate_present_fks(self) -> None:
if self.secondary is not None:
secondarycols = util.column_set(self.secondary.c)
else:
secondarycols = set()
def is_foreign(
a: ColumnElement[Any], b: ColumnElement[Any]
) -> Optional[ColumnElement[Any]]:
if isinstance(a, schema.Column) and isinstance(b, schema.Column):
if a.references(b):
return a
elif b.references(a):
return b
if secondarycols:
if a in secondarycols and b not in secondarycols:
return a
elif b in secondarycols and a not in secondarycols:
return b
return None
def visit_binary(binary: BinaryExpression[Any]) -> None:
if not isinstance(
binary.left, sql.ColumnElement
) or not isinstance(binary.right, sql.ColumnElement):
return
if (
"foreign" not in binary.left._annotations
and "foreign" not in binary.right._annotations
):
col = is_foreign(binary.left, binary.right)
if col is not None:
if col.compare(binary.left):
binary.left = binary.left._annotate({"foreign": True})
elif col.compare(binary.right):
binary.right = binary.right._annotate(
{"foreign": True}
)
self.primaryjoin = visitors.cloned_traverse(
self.primaryjoin, {}, {"binary": visit_binary}
)
if self.secondaryjoin is not None:
self.secondaryjoin = visitors.cloned_traverse(
self.secondaryjoin, {}, {"binary": visit_binary}
)
def _refers_to_parent_table(self) -> bool:
"""Return True if the join condition contains column
comparisons where both columns are in both tables.
"""
pt = self.parent_persist_selectable
mt = self.child_persist_selectable
result = False
def visit_binary(binary: BinaryExpression[Any]) -> None:
nonlocal result
c, f = binary.left, binary.right
if (
isinstance(c, expression.ColumnClause)
and isinstance(f, expression.ColumnClause)
and pt.is_derived_from(c.table)
and pt.is_derived_from(f.table)
and mt.is_derived_from(c.table)
and mt.is_derived_from(f.table)
):
result = True
visitors.traverse(self.primaryjoin, {}, {"binary": visit_binary})
return result
def _tables_overlap(self) -> bool:
"""Return True if parent/child tables have some overlap."""
return selectables_overlap(
self.parent_persist_selectable, self.child_persist_selectable
)
def _annotate_remote(self) -> None:
"""Annotate the primaryjoin and secondaryjoin
structures with 'remote' annotations marking columns
considered as part of the 'remote' side.
"""
if self._has_remote_annotations:
return
if self.secondary is not None:
self._annotate_remote_secondary()
elif self._local_remote_pairs or self._remote_side:
self._annotate_remote_from_args()
elif self._refers_to_parent_table():
self._annotate_selfref(
lambda col: "foreign" in col._annotations, False
)
elif self._tables_overlap():
self._annotate_remote_with_overlap()
else:
self._annotate_remote_distinct_selectables()
def _annotate_remote_secondary(self) -> None:
"""annotate 'remote' in primaryjoin, secondaryjoin
when 'secondary' is present.
"""
assert self.secondary is not None
fixed_secondary = self.secondary
def repl(element: _CE, **kw: Any) -> Optional[_CE]:
if fixed_secondary.c.contains_column(element):
return element._annotate({"remote": True})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, repl
)
assert self.secondaryjoin is not None
self.secondaryjoin = visitors.replacement_traverse(
self.secondaryjoin, {}, repl
)
def _annotate_selfref(
self, fn: Callable[[ColumnElement[Any]], bool], remote_side_given: bool
) -> None:
"""annotate 'remote' in primaryjoin, secondaryjoin
when the relationship is detected as self-referential.
"""
def visit_binary(binary: BinaryExpression[Any]) -> None:
equated = binary.left.compare(binary.right)
if isinstance(binary.left, expression.ColumnClause) and isinstance(
binary.right, expression.ColumnClause
):
# assume one to many - FKs are "remote"
if fn(binary.left):
binary.left = binary.left._annotate({"remote": True})
if fn(binary.right) and not equated:
binary.right = binary.right._annotate({"remote": True})
elif not remote_side_given:
self._warn_non_column_elements()
self.primaryjoin = visitors.cloned_traverse(
self.primaryjoin, {}, {"binary": visit_binary}
)
def _annotate_remote_from_args(self) -> None:
"""annotate 'remote' in primaryjoin, secondaryjoin
when the 'remote_side' or '_local_remote_pairs'
arguments are used.
"""
if self._local_remote_pairs:
if self._remote_side:
raise sa_exc.ArgumentError(
"remote_side argument is redundant "
"against more detailed _local_remote_side "
"argument."
)
remote_side = [r for (l, r) in self._local_remote_pairs]
else:
remote_side = self._remote_side
if self._refers_to_parent_table():
self._annotate_selfref(lambda col: col in remote_side, True)
else:
def repl(element: _CE, **kw: Any) -> Optional[_CE]:
# use set() to avoid generating ``__eq__()`` expressions
# against each element
if element in set(remote_side):
return element._annotate({"remote": True})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, repl
)
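# Illustrative sketch (hypothetical model): remote_side feeds this method
# for the classic self-referential adjacency list:
#
#     class Node(Base):
#         __tablename__ = "node"
#         id = mapped_column(Integer, primary_key=True)
#         parent_id = mapped_column(ForeignKey("node.id"))
#         parent = relationship("Node", remote_side="Node.id")
#
# Node.id lands in remote_side, _annotate_selfref() marks it "remote",
# and the relationship becomes many-to-one toward the parent row.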
def _annotate_remote_with_overlap(self) -> None:
"""annotate 'remote' in primaryjoin, secondaryjoin
when the parent/child tables have some set of
tables in common, though it is not a fully self-referential
relationship.
"""
def visit_binary(binary: BinaryExpression[Any]) -> None:
binary.left, binary.right = proc_left_right(
binary.left, binary.right
)
binary.right, binary.left = proc_left_right(
binary.right, binary.left
)
check_entities = (
self.prop is not None and self.prop.mapper is not self.prop.parent
)
def proc_left_right(
left: ColumnElement[Any], right: ColumnElement[Any]
) -> Tuple[ColumnElement[Any], ColumnElement[Any]]:
if isinstance(left, expression.ColumnClause) and isinstance(
right, expression.ColumnClause
):
if self.child_persist_selectable.c.contains_column(
right
) and self.parent_persist_selectable.c.contains_column(left):
right = right._annotate({"remote": True})
elif (
check_entities
and right._annotations.get("parentmapper") is self.prop.mapper
):
right = right._annotate({"remote": True})
elif (
check_entities
and left._annotations.get("parentmapper") is self.prop.mapper
):
left = left._annotate({"remote": True})
else:
self._warn_non_column_elements()
return left, right
self.primaryjoin = visitors.cloned_traverse(
self.primaryjoin, {}, {"binary": visit_binary}
)
def _annotate_remote_distinct_selectables(self) -> None:
"""annotate 'remote' in primaryjoin, secondaryjoin
when the parent/child tables are entirely
separate.
"""
def repl(element: _CE, **kw: Any) -> Optional[_CE]:
if self.child_persist_selectable.c.contains_column(element) and (
not self.parent_local_selectable.c.contains_column(element)
or self.child_local_selectable.c.contains_column(element)
):
return element._annotate({"remote": True})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, repl
)
def _warn_non_column_elements(self) -> None:
util.warn(
"Non-simple column elements in primary "
"join condition for property %s - consider using "
"remote() annotations to mark the remote side." % self.prop
)
def _annotate_local(self) -> None:
"""Annotate the primaryjoin and secondaryjoin
structures with 'local' annotations.
This annotates all column elements found
simultaneously in the parent table
and the join condition that don't have a
'remote' annotation set up from
_annotate_remote() or user-defined.
"""
if self._has_annotation(self.primaryjoin, "local"):
return
if self._local_remote_pairs:
local_side = util.column_set(
[l for (l, r) in self._local_remote_pairs]
)
else:
local_side = util.column_set(self.parent_persist_selectable.c)
def locals_(element: _CE, **kw: Any) -> Optional[_CE]:
if "remote" not in element._annotations and element in local_side:
return element._annotate({"local": True})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, locals_
)
def _annotate_parentmapper(self) -> None:
def parentmappers_(element: _CE, **kw: Any) -> Optional[_CE]:
if "remote" in element._annotations:
return element._annotate({"parentmapper": self.prop.mapper})
elif "local" in element._annotations:
return element._annotate({"parentmapper": self.prop.parent})
return None
self.primaryjoin = visitors.replacement_traverse(
self.primaryjoin, {}, parentmappers_
)
def _check_remote_side(self) -> None:
if not self.local_remote_pairs:
raise sa_exc.ArgumentError(
"Relationship %s could "
"not determine any unambiguous local/remote column "
"pairs based on join condition and remote_side "
"arguments. "
"Consider using the remote() annotation to "
"accurately mark those elements of the join "
"condition that are on the remote side of "
"the relationship." % (self.prop,)
)
else:
not_target = util.column_set(
self.parent_persist_selectable.c
).difference(self.child_persist_selectable.c)
for _, rmt in self.local_remote_pairs:
if rmt in not_target:
util.warn(
"Expression %s is marked as 'remote', but these "
"column(s) are local to the local side. The "
"remote() annotation is needed only for a "
"self-referential relationship where both sides "
"of the relationship refer to the same tables."
% (rmt,)
)
def _check_foreign_cols(
self, join_condition: ColumnElement[bool], primary: bool
) -> None:
"""Check the foreign key columns collected and emit error
messages."""
can_sync = False
foreign_cols = self._gather_columns_with_annotation(
join_condition, "foreign"
)
has_foreign = bool(foreign_cols)
if primary:
can_sync = bool(self.synchronize_pairs)
else:
can_sync = bool(self.secondary_synchronize_pairs)
if (
self.support_sync
and can_sync
or (not self.support_sync and has_foreign)
):
return
# from here below is just determining the best error message
# to report. Check for a join condition using any operator
# (not just ==), perhaps they need to turn on "viewonly=True".
if self.support_sync and has_foreign and not can_sync:
err = (
"Could not locate any simple equality expressions "
"involving locally mapped foreign key columns for "
"%s join condition "
"'%s' on relationship %s."
% (
primary and "primary" or "secondary",
join_condition,
self.prop,
)
)
err += (
" Ensure that referencing columns are associated "
"with a ForeignKey or ForeignKeyConstraint, or are "
"annotated in the join condition with the foreign() "
"annotation. To allow comparison operators other than "
"'==', the relationship can be marked as viewonly=True."
)
raise sa_exc.ArgumentError(err)
else:
err = (
"Could not locate any relevant foreign key columns "
"for %s join condition '%s' on relationship %s."
% (
primary and "primary" or "secondary",
join_condition,
self.prop,
)
)
err += (
" Ensure that referencing columns are associated "
"with a ForeignKey or ForeignKeyConstraint, or are "
"annotated in the join condition with the foreign() "
"annotation."
)
raise sa_exc.ArgumentError(err)
def _determine_direction(self) -> None:
"""Determine if this relationship is one to many, many to one,
many to many.
"""
if self.secondaryjoin is not None:
self.direction = MANYTOMANY
else:
parentcols = util.column_set(self.parent_persist_selectable.c)
targetcols = util.column_set(self.child_persist_selectable.c)
# fk collection which suggests ONETOMANY.
onetomany_fk = targetcols.intersection(self.foreign_key_columns)
# fk collection which suggests MANYTOONE.
manytoone_fk = parentcols.intersection(self.foreign_key_columns)
if onetomany_fk and manytoone_fk:
# fks on both sides. test for overlap of local/remote
# with foreign key.
# we will gather columns directly from their annotations
# without deannotating, so that we can distinguish on a column
# that refers to itself.
# 1. columns that are both remote and FK suggest
# onetomany.
onetomany_local = self._gather_columns_with_annotation(
self.primaryjoin, "remote", "foreign"
)
# 2. columns that are FK but are not remote (e.g. local)
# suggest manytoone.
manytoone_local = {
c
for c in self._gather_columns_with_annotation(
self.primaryjoin, "foreign"
)
if "remote" not in c._annotations
}
# 3. if both collections are present, remove columns that
# refer to themselves. This is for the case of
# and_(Me.id == Me.remote_id, Me.version == Me.version)
if onetomany_local and manytoone_local:
self_equated = self.remote_columns.intersection(
self.local_columns
)
onetomany_local = onetomany_local.difference(self_equated)
manytoone_local = manytoone_local.difference(self_equated)
# at this point, if only one or the other collection is
# present, we know the direction, otherwise it's still
# ambiguous.
if onetomany_local and not manytoone_local:
self.direction = ONETOMANY
elif manytoone_local and not onetomany_local:
self.direction = MANYTOONE
else:
raise sa_exc.ArgumentError(
"Can't determine relationship"
" direction for relationship '%s' - foreign "
"key columns within the join condition are present "
"in both the parent and the child's mapped tables. "
"Ensure that only those columns referring "
"to a parent column are marked as foreign, "
"either via the foreign() annotation or "
"via the foreign_keys argument." % self.prop
)
elif onetomany_fk:
self.direction = ONETOMANY
elif manytoone_fk:
self.direction = MANYTOONE
else:
raise sa_exc.ArgumentError(
"Can't determine relationship "
"direction for relationship '%s' - foreign "
"key columns are present in neither the parent "
"nor the child's mapped tables" % self.prop
)
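# Illustrative sketch (hypothetical tables): direction falls out of which
# side owns the foreign key. Given child.parent_id -> ForeignKey("parent.id"),
# Parent.children sees the FK on the target table and becomes ONETOMANY,
# while Child.parent sees it on its own (local) table and becomes
# MANYTOONE; FKs on both sides must be narrowed with foreign() or
# foreign_keys, as the error above describes.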
def _deannotate_pairs(
self, collection: _ColumnPairIterable
) -> _MutableColumnPairs:
"""provide deannotation for the various lists of
pairs, so that using them in hashes doesn't incur
high-overhead __eq__() comparisons against
original columns mapped.
"""
return [(x._deannotate(), y._deannotate()) for x, y in collection]
def _setup_pairs(self) -> None:
sync_pairs: _MutableColumnPairs = []
lrp: util.OrderedSet[
Tuple[ColumnElement[Any], ColumnElement[Any]]
] = util.OrderedSet([])
secondary_sync_pairs: _MutableColumnPairs = []
def go(
joincond: ColumnElement[bool],
collection: _MutableColumnPairs,
) -> None:
def visit_binary(
binary: BinaryExpression[Any],
left: ColumnElement[Any],
right: ColumnElement[Any],
) -> None:
if (
"remote" in right._annotations
and "remote" not in left._annotations
and self.can_be_synced_fn(left)
):
lrp.add((left, right))
elif (
"remote" in left._annotations
and "remote" not in right._annotations
and self.can_be_synced_fn(right)
):
lrp.add((right, left))
if binary.operator is operators.eq and self.can_be_synced_fn(
left, right
):
if "foreign" in right._annotations:
collection.append((left, right))
elif "foreign" in left._annotations:
collection.append((right, left))
visit_binary_product(visit_binary, joincond)
for joincond, collection in [
(self.primaryjoin, sync_pairs),
(self.secondaryjoin, secondary_sync_pairs),
]:
if joincond is None:
continue
go(joincond, collection)
self.local_remote_pairs = self._deannotate_pairs(lrp)
self.synchronize_pairs = self._deannotate_pairs(sync_pairs)
self.secondary_synchronize_pairs = self._deannotate_pairs(
secondary_sync_pairs
)
_track_overlapping_sync_targets: weakref.WeakKeyDictionary[
ColumnElement[Any],
weakref.WeakKeyDictionary[
RelationshipProperty[Any], ColumnElement[Any]
],
] = weakref.WeakKeyDictionary()
def _warn_for_conflicting_sync_targets(self) -> None:
if not self.support_sync:
return
# we would like to detect if we are synchronizing any column
# pairs in conflict with another relationship that wishes to sync
# an entirely different column to the same target. This is a
# very rare edge case so we will try to minimize the memory/overhead
# impact of this check
for from_, to_ in [
(from_, to_) for (from_, to_) in self.synchronize_pairs
] + [
(from_, to_) for (from_, to_) in self.secondary_synchronize_pairs
]:
# save ourselves a ton of memory and overhead by only
# considering columns that are subject to overlapping
# FK constraints at the core level. This condition can arise
# if multiple relationships overlap foreign() directly, but
# we're going to assume it's typically a ForeignKeyConstraint-
# level configuration that benefits from this warning.
if to_ not in self._track_overlapping_sync_targets:
self._track_overlapping_sync_targets[
to_
] = weakref.WeakKeyDictionary({self.prop: from_})
else:
other_props = []
prop_to_from = self._track_overlapping_sync_targets[to_]
for pr, fr_ in prop_to_from.items():
if (
not pr.mapper._dispose_called
and pr not in self.prop._reverse_property
and pr.key not in self.prop._overlaps
and self.prop.key not in pr._overlaps
# note: the "__*" symbol is used internally by
# SQLAlchemy as a general means of suppressing the
# overlaps warning for some extension cases, however
# this is not currently
# a publicly supported symbol and may change at
# any time.
and "__*" not in self.prop._overlaps
and "__*" not in pr._overlaps
and not self.prop.parent.is_sibling(pr.parent)
and not self.prop.mapper.is_sibling(pr.mapper)
and not self.prop.parent.is_sibling(pr.mapper)
and not self.prop.mapper.is_sibling(pr.parent)
and (
self.prop.key != pr.key
or not self.prop.parent.common_parent(pr.parent)
)
):
other_props.append((pr, fr_))
if other_props:
util.warn(
"relationship '%s' will copy column %s to column %s, "
"which conflicts with relationship(s): %s. "
"If this is not the intention, consider if these "
"relationships should be linked with "
"back_populates, or if viewonly=True should be "
"applied to one or more if they are read-only. "
"For the less common case that foreign key "
"constraints are partially overlapping, the "
"orm.foreign() "
"annotation can be used to isolate the columns that "
"should be written towards. To silence this "
"warning, add the parameter 'overlaps=\"%s\"' to the "
"'%s' relationship."
% (
self.prop,
from_,
to_,
", ".join(
sorted(
"'%s' (copies %s to %s)" % (pr, fr_, to_)
for (pr, fr_) in other_props
)
),
",".join(sorted(pr.key for pr, fr in other_props)),
self.prop,
),
code="qzyx",
)
self._track_overlapping_sync_targets[to_][self.prop] = from_
@util.memoized_property
def remote_columns(self) -> Set[ColumnElement[Any]]:
return self._gather_join_annotations("remote")
@util.memoized_property
def local_columns(self) -> Set[ColumnElement[Any]]:
return self._gather_join_annotations("local")
@util.memoized_property
def foreign_key_columns(self) -> Set[ColumnElement[Any]]:
return self._gather_join_annotations("foreign")
def _gather_join_annotations(
self, annotation: str
) -> Set[ColumnElement[Any]]:
s = set(
self._gather_columns_with_annotation(self.primaryjoin, annotation)
)
if self.secondaryjoin is not None:
s.update(
self._gather_columns_with_annotation(
self.secondaryjoin, annotation
)
)
return {x._deannotate() for x in s}
def _gather_columns_with_annotation(
self, clause: ColumnElement[Any], *annotation: Iterable[str]
) -> Set[ColumnElement[Any]]:
annotation_set = set(annotation)
return {
cast(ColumnElement[Any], col)
for col in visitors.iterate(clause, {})
if annotation_set.issubset(col._annotations)
}
def join_targets(
self,
source_selectable: Optional[FromClause],
dest_selectable: FromClause,
aliased: bool,
single_crit: Optional[ColumnElement[bool]] = None,
extra_criteria: Tuple[ColumnElement[bool], ...] = (),
) -> Tuple[
ColumnElement[bool],
Optional[ColumnElement[bool]],
Optional[FromClause],
Optional[ClauseAdapter],
FromClause,
]:
"""Given a source and destination selectable, create a
join between them.
This takes into account aliasing the join clause
to reference the appropriate corresponding columns
in the target objects, as well as the extra child
criterion, equivalent column sets, etc.
"""
# place a barrier on the destination such that
# replacement traversals won't ever dig into it.
# its internal structure remains fixed
# regardless of context.
dest_selectable = _shallow_annotate(
dest_selectable, {"no_replacement_traverse": True}
)
primaryjoin, secondaryjoin, secondary = (
self.primaryjoin,
self.secondaryjoin,
self.secondary,
)
# adjust the join condition for single table inheritance,
# in the case that the join is to a subclass
# this is analogous to the
# "_adjust_for_single_table_inheritance()" method in Query.
if single_crit is not None:
if secondaryjoin is not None:
secondaryjoin = secondaryjoin & single_crit
else:
primaryjoin = primaryjoin & single_crit
if extra_criteria:
def mark_unrelated_columns_as_ok_to_adapt(
elem: SupportsAnnotations, annotations: _AnnotationDict
) -> SupportsAnnotations:
"""note unrelated columns in the "extra criteria" as OK
to adapt, even though they are not part of our "local"
or "remote" side.
see #9779 for this case
"""
parentmapper_for_element = elem._annotations.get(
"parentmapper", None
)
if (
parentmapper_for_element is not self.prop.parent
and parentmapper_for_element is not self.prop.mapper
):
return _safe_annotate(elem, annotations)
else:
return elem
extra_criteria = tuple(
_deep_annotate(
elem,
{"ok_to_adapt_in_join_condition": True},
annotate_callable=mark_unrelated_columns_as_ok_to_adapt,
)
for elem in extra_criteria
)
if secondaryjoin is not None:
secondaryjoin = secondaryjoin & sql.and_(*extra_criteria)
else:
primaryjoin = primaryjoin & sql.and_(*extra_criteria)
if aliased:
if secondary is not None:
secondary = secondary._anonymous_fromclause(flat=True)
primary_aliasizer = ClauseAdapter(
secondary, exclude_fn=_ColInAnnotations("local")
)
secondary_aliasizer = ClauseAdapter(
dest_selectable, equivalents=self.child_equivalents
).chain(primary_aliasizer)
if source_selectable is not None:
primary_aliasizer = ClauseAdapter(
secondary, exclude_fn=_ColInAnnotations("local")
).chain(
ClauseAdapter(
source_selectable,
equivalents=self.parent_equivalents,
)
)
secondaryjoin = secondary_aliasizer.traverse(secondaryjoin)
else:
primary_aliasizer = ClauseAdapter(
dest_selectable,
exclude_fn=_ColInAnnotations("local"),
equivalents=self.child_equivalents,
)
if source_selectable is not None:
primary_aliasizer.chain(
ClauseAdapter(
source_selectable,
exclude_fn=_ColInAnnotations("remote"),
equivalents=self.parent_equivalents,
)
)
secondary_aliasizer = None
primaryjoin = primary_aliasizer.traverse(primaryjoin)
target_adapter = secondary_aliasizer or primary_aliasizer
target_adapter.exclude_fn = None
else:
target_adapter = None
return (
primaryjoin,
secondaryjoin,
secondary,
target_adapter,
dest_selectable,
)
def create_lazy_clause(
self, reverse_direction: bool = False
) -> Tuple[
ColumnElement[bool],
Dict[str, ColumnElement[Any]],
Dict[ColumnElement[Any], ColumnElement[Any]],
]:
binds: Dict[ColumnElement[Any], BindParameter[Any]] = {}
equated_columns: Dict[ColumnElement[Any], ColumnElement[Any]] = {}
has_secondary = self.secondaryjoin is not None
if has_secondary:
lookup = collections.defaultdict(list)
for l, r in self.local_remote_pairs:
lookup[l].append((l, r))
equated_columns[r] = l
elif not reverse_direction:
for l, r in self.local_remote_pairs:
equated_columns[r] = l
else:
for l, r in self.local_remote_pairs:
equated_columns[l] = r
def col_to_bind(
element: ColumnElement[Any], **kw: Any
) -> Optional[BindParameter[Any]]:
if (
(not reverse_direction and "local" in element._annotations)
or reverse_direction
and (
(has_secondary and element in lookup)
or (not has_secondary and "remote" in element._annotations)
)
):
if element not in binds:
binds[element] = sql.bindparam(
None, None, type_=element.type, unique=True
)
return binds[element]
return None
lazywhere = self.primaryjoin
if self.secondaryjoin is None or not reverse_direction:
lazywhere = visitors.replacement_traverse(
lazywhere, {}, col_to_bind
)
if self.secondaryjoin is not None:
secondaryjoin = self.secondaryjoin
if reverse_direction:
secondaryjoin = visitors.replacement_traverse(
secondaryjoin, {}, col_to_bind
)
lazywhere = sql.and_(lazywhere, secondaryjoin)
bind_to_col = {binds[col].key: col for col in binds}
return lazywhere, bind_to_col, equated_columns
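# Illustrative sketch: for a simple many-to-one whose primaryjoin is
# child.parent_id == parent.id, the traversal above produces roughly
#
#     lazywhere        ->  :param_1 == parent.id
#     bind_to_col      ->  {"param_1": child.parent_id}
#     equated_columns  ->  {parent.id: child.parent_id}
#
# so a lazy load can substitute the loaded row's parent_id value into
# the bound parameter to fetch the related row.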
class _ColInAnnotations:
"""Serializable object that tests for a name in c._annotations."""
__slots__ = ("name",)
def __init__(self, name: str):
self.name = name
def __call__(self, c: ClauseElement) -> bool:
return (
self.name in c._annotations
or "ok_to_adapt_in_join_condition" in c._annotations
)
class Relationship( # type: ignore
RelationshipProperty[_T],
_DeclarativeMapped[_T],
WriteOnlyMapped[_T], # not compatible with Mapped[_T]
DynamicMapped[_T], # not compatible with Mapped[_T]
):
"""Describes an object property that holds a single item or list
of items that correspond to a related database table.
Public constructor is the :func:`_orm.relationship` function.
.. seealso::
:ref:`relationship_config_toplevel`
.. versionchanged:: 2.0 Added :class:`_orm.Relationship` as a Declarative
compatible subclass for :class:`_orm.RelationshipProperty`.
"""
inherit_cache = True
""":meta private:"""
# ---- separate source file (blob 63093190ee20e10698bd99dcea94ccf5d076a006, Python): chemical species definitions ----
species(
label = 'C=C([CH]C)C(=C)[CH]C(24182)',
structure = SMILES('[CH2]C(=CC)C([CH2])=CC'),
E0 = (249.687,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.735277,'amu*angstrom^2'), symmetry=1, barrier=(16.9055,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0632434,'amu*angstrom^2'), symmetry=1, barrier=(29.514,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.737545,'amu*angstrom^2'), symmetry=1, barrier=(16.9576,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.732781,'amu*angstrom^2'), symmetry=1, barrier=(16.8481,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.739219,'amu*angstrom^2'), symmetry=1, barrier=(16.9961,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.384005,0.0840749,-5.09991e-05,5.50851e-09,4.14197e-12,30198.9,28.4131], Tmin=(100,'K'), Tmax=(1039.09,'K')), NASAPolynomial(coeffs=[18.1326,0.0354522,-1.35159e-05,2.44392e-09,-1.69358e-13,25127.7,-67.5143], Tmin=(1039.09,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(249.687,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
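# Illustrative sketch (not part of the generated mechanism): the
# NASAPolynomial entries above follow the standard NASA-7 form, so heat
# capacity can be recovered as Cp/R = a1 + a2*T + a3*T**2 + a4*T**3 + a5*T**4.

R = 8.314462618  # J/(mol*K)

def nasa_cp(coeffs, T):
    """Cp in J/(mol*K) from the first five NASA-7 coefficients."""
    a1, a2, a3, a4, a5 = coeffs[:5]
    return R * (a1 + a2 * T + a3 * T**2 + a4 * T**3 + a5 * T**4)

# e.g. the low-T polynomial of C=C([CH]C)C(=C)[CH]C above, at 500 K:
# nasa_cp([-0.384005, 0.0840749, -5.09991e-05, 5.50851e-09, 4.14197e-12], 500.0)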
species(
label = 'CH3CHCCH2(18175)',
structure = SMILES('C=C=CC'),
E0 = (145.615,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,540,610,2055,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.759584,'amu*angstrom^2'), symmetry=1, barrier=(17.4643,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2996.71,'J/mol'), sigma=(5.18551,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=468.08 K, Pc=48.77 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.74635,0.0218189,8.22353e-06,-2.14768e-08,8.55624e-12,17563.6,12.7381], Tmin=(100,'K'), Tmax=(1025.6,'K')), NASAPolynomial(coeffs=[6.82078,0.0192338,-7.45622e-06,1.36536e-09,-9.53195e-14,16028,-10.4333], Tmin=(1025.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(145.615,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(228.648,'J/(mol*K)'), label="""CH3CHCCH2""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C1([CH]C)CC1=CC(25275)',
structure = SMILES('[CH2]C1([CH]C)CC1=CC'),
E0 = (462.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.263258,0.0692237,-2.26363e-05,-1.35463e-08,8.13734e-12,55737.7,31.4039], Tmin=(100,'K'), Tmax=(1105.46,'K')), NASAPolynomial(coeffs=[15.171,0.0400578,-1.66801e-05,3.13624e-09,-2.2049e-13,50927.8,-48.8594], Tmin=(1105.46,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(462.221,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsCs) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(Methylene_cyclopropane) + radical(Neopentyl) + radical(Cs_S)"""),
)
species(
label = 'C=[C][CH]C(18176)',
structure = SMILES('[CH2][C]=CC'),
E0 = (361.056,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655],'cm^-1')),
HinderedRotor(inertia=(0.352622,'amu*angstrom^2'), symmetry=1, barrier=(8.10748,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.828631,'amu*angstrom^2'), symmetry=1, barrier=(19.0519,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (54.0904,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.42015,0.030446,-1.69076e-05,4.64684e-09,-5.12013e-13,43485.7,14.8304], Tmin=(100,'K'), Tmax=(2065.83,'K')), NASAPolynomial(coeffs=[10.7464,0.014324,-5.20136e-06,8.69079e-10,-5.48385e-14,40045.6,-31.3799], Tmin=(2065.83,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(361.056,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(274.378,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(=CC)C(C)=[C]C(25412)',
structure = SMILES('[CH2]C(=CC)C(C)=[C]C'),
E0 = (336.03,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,1685,370,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,222.04],'cm^-1')),
HinderedRotor(inertia=(0.395973,'amu*angstrom^2'), symmetry=1, barrier=(13.8694,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.396086,'amu*angstrom^2'), symmetry=1, barrier=(13.8683,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395737,'amu*angstrom^2'), symmetry=1, barrier=(13.8691,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395039,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395901,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.116365,0.0876489,-7.20737e-05,3.21805e-08,-5.96317e-12,40565.5,28.3373], Tmin=(100,'K'), Tmax=(1264.63,'K')), NASAPolynomial(coeffs=[14.5979,0.041109,-1.68732e-05,3.08148e-09,-2.10818e-13,36843.8,-46.1055], Tmin=(1264.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(336.03,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(=[C]C)C(C)=CC(25413)',
structure = SMILES('[CH2]C(=[C]C)C(C)=CC'),
E0 = (336.03,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,1685,370,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,222.04],'cm^-1')),
HinderedRotor(inertia=(0.395973,'amu*angstrom^2'), symmetry=1, barrier=(13.8694,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.396086,'amu*angstrom^2'), symmetry=1, barrier=(13.8683,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395737,'amu*angstrom^2'), symmetry=1, barrier=(13.8691,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395039,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.395901,'amu*angstrom^2'), symmetry=1, barrier=(13.8689,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.116365,0.0876489,-7.20737e-05,3.21805e-08,-5.96317e-12,40565.5,28.3373], Tmin=(100,'K'), Tmax=(1264.63,'K')), NASAPolynomial(coeffs=[14.5979,0.041109,-1.68732e-05,3.08148e-09,-2.10818e-13,36843.8,-46.1055], Tmin=(1264.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(336.03,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(Cds_S)"""),
)
species(
label = '[CH2]C(=CC)[C](C)C=C(24605)',
structure = SMILES('[CH2]C=C(C)C([CH2])=CC'),
E0 = (216.244,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.712083,'amu*angstrom^2'), symmetry=1, barrier=(16.3722,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.555659,'amu*angstrom^2'), symmetry=1, barrier=(96.3851,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0202512,'amu*angstrom^2'), symmetry=1, barrier=(16.3711,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.712008,'amu*angstrom^2'), symmetry=1, barrier=(16.3705,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(4.19211,'amu*angstrom^2'), symmetry=1, barrier=(96.3849,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.0883175,0.0775021,-3.58132e-05,-7.55711e-09,8.27771e-12,26166.1,29.3215], Tmin=(100,'K'), Tmax=(1017.17,'K')), NASAPolynomial(coeffs=[16.4341,0.0376674,-1.41425e-05,2.53759e-09,-1.75328e-13,21504.4,-57.0638], Tmin=(1017.17,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(216.244,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(C=CC=CCJ)"""),
)
species(
label = '[CH2][C](C=C)C(C)=CC(24606)',
structure = SMILES('[CH2]C=C([CH2])C(C)=CC'),
E0 = (216.244,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.0883175,0.0775021,-3.58132e-05,-7.55711e-09,8.27771e-12,26166.1,29.3215], Tmin=(100,'K'), Tmax=(1017.17,'K')), NASAPolynomial(coeffs=[16.4341,0.0376674,-1.41425e-05,2.53759e-09,-1.75328e-13,21504.4,-57.0638], Tmin=(1017.17,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(216.244,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Allyl_P) + radical(C=CC=CCJ)"""),
)
species(
label = '[CH2]C(=CC)[C]1CC1C(25414)',
structure = SMILES('[CH2]C(=CC)[C]1CC1C'),
E0 = (289.9,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.71289,0.0520158,3.84829e-05,-8.55933e-08,3.61457e-11,35003.5,26.4903], Tmin=(100,'K'), Tmax=(968.714,'K')), NASAPolynomial(coeffs=[16.7686,0.0352996,-1.24057e-05,2.26286e-09,-1.62921e-13,29566.5,-62.466], Tmin=(968.714,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(289.9,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-CsCsCsH) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + ring(Cyclopropane) + radical(Allyl_T) + radical(Allyl_P)"""),
)
species(
label = '[CH2][C]1C(=CC)CC1C(25415)',
structure = SMILES('[CH2]C1=C([CH]C)CC1C'),
E0 = (304.572,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.583091,0.0531885,4.0938e-05,-9.08388e-08,3.83549e-11,36774.2,26.4705], Tmin=(100,'K'), Tmax=(972.301,'K')), NASAPolynomial(coeffs=[18.2947,0.0339462,-1.21014e-05,2.24934e-09,-1.64353e-13,30795.4,-71.5147], Tmin=(972.301,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(304.572,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + ring(Cyclobutene) + radical(Allyl_P) + radical(Allyl_S)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = '[CH2]C(=C)C([CH2])=CC(25416)',
structure = SMILES('[CH2]C(=C)C([CH2])=CC'),
E0 = (285.713,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3100,1380,975,1025,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3010,987.5,1337.5,450,1655,311.383],'cm^-1')),
HinderedRotor(inertia=(0.327475,'amu*angstrom^2'), symmetry=1, barrier=(22.5291,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327466,'amu*angstrom^2'), symmetry=1, barrier=(22.5294,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327318,'amu*angstrom^2'), symmetry=1, barrier=(22.5272,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.327483,'amu*angstrom^2'), symmetry=1, barrier=(22.5297,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1543,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.335271,0.0676667,-2.76626e-05,-1.62749e-08,1.21982e-11,34506.8,24.024], Tmin=(100,'K'), Tmax=(980.594,'K')), NASAPolynomial(coeffs=[17.5531,0.0266059,-9.47854e-06,1.70194e-09,-1.19937e-13,29727.4,-65.8563], Tmin=(980.594,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(285.713,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(390.78,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'C=C([CH]C)C[C]=CC(24184)',
structure = SMILES('[CH2]C(=CC)C[C]=CC'),
E0 = (366.985,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2995,3025,975,1000,1300,1375,400,500,1630,1680,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,1685,370,350,440,435,1725,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180,579.702],'cm^-1')),
HinderedRotor(inertia=(0.147406,'amu*angstrom^2'), symmetry=1, barrier=(3.38916,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.64226,'amu*angstrom^2'), symmetry=1, barrier=(14.7668,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.64164,'amu*angstrom^2'), symmetry=1, barrier=(14.7526,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.643937,'amu*angstrom^2'), symmetry=1, barrier=(14.8054,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.145327,'amu*angstrom^2'), symmetry=1, barrier=(3.34136,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3683.66,'J/mol'), sigma=(6.4482,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=575.38 K, Pc=31.18 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.29648,0.0786067,-5.42868e-05,1.96375e-08,-2.97459e-12,44273.2,31.2372], Tmin=(100,'K'), Tmax=(1490.43,'K')), NASAPolynomial(coeffs=[13.9025,0.0420909,-1.75363e-05,3.199e-09,-2.17227e-13,40217.5,-39.8334], Tmin=(1490.43,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(366.985,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)(Cds-Cds)HH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = 'CC=C1CCC1=CC(25269)',
structure = SMILES('CC=C1CCC1=CC'),
E0 = (114.107,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.677799,0.0585738,5.80411e-06,-4.1598e-08,1.78951e-11,13856,25.5085], Tmin=(100,'K'), Tmax=(1034.79,'K')), NASAPolynomial(coeffs=[13.4814,0.0415234,-1.65073e-05,3.07348e-09,-2.16896e-13,9469.28,-45.0922], Tmin=(1034.79,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(114.107,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(473.925,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + ring(12methylenecyclobutane)"""),
)
species(
label = 'CH2(19)',
structure = SMILES('[CH2]'),
E0 = (381.563,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1032.72,2936.3,3459],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.8328,0.000224446,4.68033e-06,-6.04743e-09,2.59009e-12,45920.8,1.40666], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[3.16229,0.00281798,-7.56235e-07,5.05446e-11,5.65236e-15,46099.1,4.77656], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(381.563,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = '[CH2]C([C]=CC)=CC(25417)',
structure = SMILES('[CH2]C([C]=CC)=CC'),
E0 = (334.774,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([350,440,435,1725,1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,3000,3100,440,815,1455,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,180],'cm^-1')),
HinderedRotor(inertia=(0.7606,'amu*angstrom^2'), symmetry=1, barrier=(17.4877,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.760854,'amu*angstrom^2'), symmetry=1, barrier=(17.4935,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.760586,'amu*angstrom^2'), symmetry=1, barrier=(17.4874,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(2.15146,'amu*angstrom^2'), symmetry=1, barrier=(49.4663,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (94.1543,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.352604,0.0734369,-5.91187e-05,2.57941e-08,-4.60694e-12,40400.9,25.1788], Tmin=(100,'K'), Tmax=(1327.42,'K')), NASAPolynomial(coeffs=[14.2321,0.0316126,-1.18565e-05,2.05761e-09,-1.36512e-13,36716.1,-45.7131], Tmin=(1327.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(334.774,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(390.78,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + radical(C=CJC=C) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C1([CH]C)C(=C)C1C(25296)',
structure = SMILES('[CH2]C1([CH]C)C(=C)C1C'),
E0 = (466.494,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.29276,0.0655305,-4.50464e-06,-3.74661e-08,1.7759e-11,56253.7,30.0992], Tmin=(100,'K'), Tmax=(1027.4,'K')), NASAPolynomial(coeffs=[16.6435,0.0372633,-1.49065e-05,2.81296e-09,-2.01072e-13,51026,-58.316], Tmin=(1027.4,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(466.494,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsCs) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + ring(Methylene_cyclopropane) + radical(Neopentyl) + radical(Cs_S)"""),
)
species(
label = 'H(3)',
structure = SMILES('[H]'),
E0 = (211.792,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = '[CH2]C(=CC)C(=C)C=C(24604)',
structure = SMILES('[CH2]C(=CC)C(=C)C=C'),
E0 = (242.677,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,181.962,683.313],'cm^-1')),
HinderedRotor(inertia=(0.669842,'amu*angstrom^2'), symmetry=1, barrier=(19.1337,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0582339,'amu*angstrom^2'), symmetry=1, barrier=(19.1767,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.83204,'amu*angstrom^2'), symmetry=1, barrier=(19.1302,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(4.52237,'amu*angstrom^2'), symmetry=1, barrier=(104.569,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (107.173,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.293043,0.0682771,-2.00337e-05,-2.05401e-08,1.21516e-11,29332.3,27.0261], Tmin=(100,'K'), Tmax=(1018.57,'K')), NASAPolynomial(coeffs=[15.7386,0.0358123,-1.37404e-05,2.51366e-09,-1.76142e-13,24723.4,-54.9529], Tmin=(1018.57,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(242.677,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(440.667,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)(Cds-Cds)) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Allyl_P)"""),
)
species(
label = '[CH2]CC(=C)C([CH2])=CC(25418)',
structure = SMILES('[CH2]CC(=C)C([CH2])=CC'),
E0 = (316.814,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3010,987.5,1337.5,450,1655,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,325,375,415,465,420,450,1700,1750,2750,2850,1437.5,1250,1305,750,350,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,180,180],'cm^-1')),
HinderedRotor(inertia=(0.0368535,'amu*angstrom^2'), symmetry=1, barrier=(17.9864,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00736317,'amu*angstrom^2'), symmetry=1, barrier=(3.60618,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.781153,'amu*angstrom^2'), symmetry=1, barrier=(17.9602,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.779478,'amu*angstrom^2'), symmetry=1, barrier=(17.9217,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.781104,'amu*angstrom^2'), symmetry=1, barrier=(17.9591,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.348925,0.0836004,-5.1879e-05,7.14877e-09,3.44908e-12,38270.9,31.5928], Tmin=(100,'K'), Tmax=(1044.14,'K')), NASAPolynomial(coeffs=[17.9255,0.0352115,-1.34219e-05,2.42456e-09,-1.67785e-13,33276.3,-63.0036], Tmin=(1044.14,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(316.814,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(RCCJ) + radical(Allyl_P)"""),
)
species(
label = '[CH]=C(CC)C([CH2])=CC(25419)',
structure = SMILES('[CH]=C(CC)C([CH2])=CC'),
E0 = (358.664,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,325,375,415,465,420,450,1700,1750,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180],'cm^-1')),
HinderedRotor(inertia=(0.701639,'amu*angstrom^2'), symmetry=1, barrier=(16.1321,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.344302,'amu*angstrom^2'), symmetry=1, barrier=(16.1602,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0492932,'amu*angstrom^2'), symmetry=1, barrier=(16.1378,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.702005,'amu*angstrom^2'), symmetry=1, barrier=(16.1405,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.702379,'amu*angstrom^2'), symmetry=1, barrier=(16.1491,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.468616,0.0864938,-5.84569e-05,1.27697e-08,1.75707e-12,43308.4,30.6389], Tmin=(100,'K'), Tmax=(1047.28,'K')), NASAPolynomial(coeffs=[18.4195,0.034593,-1.31104e-05,2.35762e-09,-1.62637e-13,38242.2,-66.6572], Tmin=(1047.28,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(358.664,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P) + radical(Cds_P)"""),
)
species(
label = '[CH2]C(=[C]C)C(=C)CC(25420)',
structure = SMILES('[CH2]C(=[C]C)C(=C)CC'),
E0 = (349.41,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2950,3100,1380,975,1025,1650,325,375,415,465,420,450,1700,1750,2750,2850,1437.5,1250,1305,750,350,3000,3100,440,815,1455,1000,180,180],'cm^-1')),
HinderedRotor(inertia=(0.159905,'amu*angstrom^2'), symmetry=1, barrier=(15.9368,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.693159,'amu*angstrom^2'), symmetry=1, barrier=(15.9371,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.693127,'amu*angstrom^2'), symmetry=1, barrier=(15.9364,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.693165,'amu*angstrom^2'), symmetry=1, barrier=(15.9372,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0150632,'amu*angstrom^2'), symmetry=1, barrier=(15.9371,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.583231,0.089245,-7.16619e-05,3.00631e-08,-5.07891e-12,42198.9,31.1306], Tmin=(100,'K'), Tmax=(1412.15,'K')), NASAPolynomial(coeffs=[19.0319,0.0336833,-1.2643e-05,2.20036e-09,-1.46165e-13,36659.1,-70.2702], Tmin=(1412.15,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(349.41,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P) + radical(Cds_S)"""),
)
species(
label = '[CH]=C([CH]C)C(C)=CC(25421)',
structure = SMILES('[CH]C(=CC)C(C)=CC'),
E0 = (317.373,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([325,375,415,465,420,450,1700,1750,2750,2762.5,2775,2787.5,2800,2812.5,2825,2837.5,2850,1350,1380,1410,1440,1470,1500,700,750,800,1000,1050,1100,1350,1375,1400,900,1000,1100,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,1200,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.247945,0.0873521,-6.16843e-05,2.31486e-08,-3.62747e-12,38328.8,29.1665], Tmin=(100,'K'), Tmax=(1460.93,'K')), NASAPolynomial(coeffs=[15.297,0.0447902,-1.7984e-05,3.20673e-09,-2.14924e-13,33786.8,-51.7212], Tmin=(1460.93,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(317.373,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2][C](C=C)C(=C)CC(24623)',
structure = SMILES('[CH2]C(C=C)=C([CH2])CC'),
E0 = (228.159,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.0497728,0.0733281,-1.6094e-05,-3.35123e-08,1.88363e-11,27601.1,30.4448], Tmin=(100,'K'), Tmax=(975.095,'K')), NASAPolynomial(coeffs=[18.3695,0.0342638,-1.21408e-05,2.16747e-09,-1.52112e-13,22274,-66.8493], Tmin=(975.095,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(228.159,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + radical(C=CC=CCJ) + radical(Allyl_P)"""),
)
species(
label = 'C[CH][C]1CCC1=CC(25422)',
structure = SMILES('C[CH]C1CCC=1[CH]C'),
E0 = (303.292,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.788866,0.0500701,4.22235e-05,-8.64809e-08,3.53174e-11,36611.5,25.2586], Tmin=(100,'K'), Tmax=(987.239,'K')), NASAPolynomial(coeffs=[16.2187,0.0373502,-1.4111e-05,2.65357e-09,-1.92503e-13,31138.2,-61.2734], Tmin=(987.239,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(303.292,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + ring(Cyclobutene) + radical(Allyl_S) + radical(Allyl_S)"""),
)
species(
label = '[CH2][C]1C(=C)C(C)C1C(25423)',
structure = SMILES('[CH2]C1=C([CH2])C(C)C1C'),
E0 = (305.852,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.377097,0.0563026,3.9705e-05,-9.53284e-08,4.14811e-11,36937,26.2973], Tmin=(100,'K'), Tmax=(959.735,'K')), NASAPolynomial(coeffs=[20.4056,0.0304853,-1.006e-05,1.83774e-09,-1.35603e-13,30437.2,-83.3398], Tmin=(959.735,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(305.852,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsCs) + ring(Cyclobutene) + radical(Allyl_P) + radical(Allyl_P)"""),
)
species(
label = 'C=CC(=C)C(C)=CC(24616)',
structure = SMILES('C=CC(=C)C(C)=CC'),
E0 = (91.1774,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.236638,0.0713806,-3.04205e-05,-5.26762e-09,5.54498e-12,11111.2,26.9518], Tmin=(100,'K'), Tmax=(1093.32,'K')), NASAPolynomial(coeffs=[14.1536,0.040705,-1.6104e-05,2.93544e-09,-2.02595e-13,6858.32,-46.9636], Tmin=(1093.32,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(91.1774,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)(Cds-Cds)) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=[C]C(C)C(=C)[CH]C(24183)',
structure = SMILES('[CH2]C(=CC)C(C)[C]=C'),
E0 = (369.44,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,350,440,435,1725,3000,3100,440,815,1455,1000,345.333,347.343],'cm^-1')),
HinderedRotor(inertia=(0.119405,'amu*angstrom^2'), symmetry=1, barrier=(9.93037,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.281457,'amu*angstrom^2'), symmetry=1, barrier=(24.022,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.116909,'amu*angstrom^2'), symmetry=1, barrier=(9.94809,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.117447,'amu*angstrom^2'), symmetry=1, barrier=(9.9744,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.116555,'amu*angstrom^2'), symmetry=1, barrier=(9.93684,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3625.33,'J/mol'), sigma=(6.4092,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=566.27 K, Pc=31.24 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.299693,0.0839308,-6.74533e-05,3.06742e-08,-6.02582e-12,44564.4,29.0122], Tmin=(100,'K'), Tmax=(1163.73,'K')), NASAPolynomial(coeffs=[10.857,0.0476425,-2.06788e-05,3.8782e-09,-2.69295e-13,42107.3,-23.5217], Tmin=(1163.73,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(369.44,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)(Cds-Cds)CsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsCs) + group(Cds-CdsCsH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(Allyl_P) + radical(Cds_S)"""),
)
species(
label = 'C=C1C(=CC)CC1C(25265)',
structure = SMILES('C=C1C(=CC)CC1C'),
E0 = (118.381,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.689924,0.0550304,2.3689e-05,-6.56265e-08,2.77602e-11,14372.8,24.9628], Tmin=(100,'K'), Tmax=(993.204,'K')), NASAPolynomial(coeffs=[15.3775,0.0380508,-1.43595e-05,2.66472e-09,-1.90565e-13,9375.16,-56.2678], Tmin=(993.204,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(118.381,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(473.925,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + ring(12methylenecyclobutane)"""),
)
species(
label = 'CHCH3(T)(95)',
structure = SMILES('[CH]C'),
E0 = (343.893,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,592.414,4000],'cm^-1')),
HinderedRotor(inertia=(0.00438699,'amu*angstrom^2'), symmetry=1, barrier=(26.7685,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (28.0532,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.82363,-0.000909515,3.2138e-05,-3.7348e-08,1.3309e-11,41371.4,7.10948], Tmin=(100,'K'), Tmax=(960.812,'K')), NASAPolynomial(coeffs=[4.30487,0.00943069,-3.27559e-06,5.95121e-10,-4.27307e-14,40709.1,1.84202], Tmin=(960.812,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(343.893,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(128.874,'J/(mol*K)'), label="""CHCH3(T)""", comment="""Thermo library: DFT_QCI_thermo"""),
)
species(
label = '[CH2]C([C]=C)=CC(24774)',
structure = SMILES('[CH2]C([C]=C)=CC'),
E0 = (370.8,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,350,440,435,1725,3000,3100,440,815,1455,1000,180],'cm^-1')),
HinderedRotor(inertia=(1.17315,'amu*angstrom^2'), symmetry=1, barrier=(26.9731,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.17496,'amu*angstrom^2'), symmetry=1, barrier=(27.0146,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(1.1727,'amu*angstrom^2'), symmetry=1, barrier=(26.9626,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (80.1277,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.0818,0.0569416,-3.56598e-05,4.1841e-09,3.20998e-12,44708.4,20.7527], Tmin=(100,'K'), Tmax=(982.69,'K')), NASAPolynomial(coeffs=[12.9204,0.0239405,-8.46845e-06,1.46434e-09,-9.91425e-14,41648.3,-39.886], Tmin=(982.69,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(370.8,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + radical(C=CJC=C) + radical(Allyl_P)"""),
)
species(
label = '[CH]=C([CH]C)C(=C)CC(25424)',
structure = SMILES('[CH]C(=CC)C(=C)CC'),
E0 = (330.753,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,2750,2770,2790,2810,2830,2850,1350,1400,1450,1500,700,800,1000,1100,1350,1400,900,1100,325,375,415,465,420,450,1700,1750,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.442166,0.0858934,-5.1432e-05,9.5936e-09,1.54315e-12,39950.3,30.9724], Tmin=(100,'K'), Tmax=(1106.5,'K')), NASAPolynomial(coeffs=[16.3579,0.0427111,-1.66841e-05,2.99222e-09,-2.04007e-13,35158.1,-56.633], Tmin=(1106.5,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(330.753,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(461.453,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(AllylJ2_triplet)"""),
)
species(
label = 'C=CC(=C)C(=C)CC(24630)',
structure = SMILES('C=CC(=C)C(=C)CC'),
E0 = (104.558,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.296747,0.0670054,-1.0269e-05,-3.13536e-08,1.59568e-11,12721.3,27.8384], Tmin=(100,'K'), Tmax=(1010.3,'K')), NASAPolynomial(coeffs=[15.6889,0.0379462,-1.44599e-05,2.64736e-09,-1.86033e-13,7984.11,-54.6302], Tmin=(1010.3,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(104.558,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(465.61,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)(Cds-Cds)) + group(Cds-Cds(Cds-Cds)H) + group(Cds-CdsHH) + group(Cds-CdsHH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=C1C(=C)C(C)C1C(25274)',
structure = SMILES('C=C1C(=C)C(C)C1C'),
E0 = (122.654,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (108.181,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.691732,0.0515838,4.13669e-05,-8.96066e-08,3.77135e-11,14890,23.0693], Tmin=(100,'K'), Tmax=(969.873,'K')), NASAPolynomial(coeffs=[17.4573,0.0342784,-1.20439e-05,2.21718e-09,-1.61071e-13,9199.74,-69.8715], Tmin=(969.873,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(122.654,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(473.925,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)CsCsH) + group(Cs-(Cds-Cds)CsCsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-Cds(Cds-Cds)Cs) + group(Cds-CdsHH) + group(Cds-CdsHH) + ring(12methylenecyclobutane)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
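# Note (illustrative comment, not RMG output): every 'thermo' entry above is a
# pair of 7-coefficient NASA polynomials, where Cp(T)/R = a1 + a2*T + a3*T^2 +
# a4*T^3 + a5*T^4 (a6 and a7 fix the enthalpy and entropy offsets). The helper
# name _nasa_cp below is ours; e.g. N2's low-T coefficients reproduce
# Cp ~ 29.1 J/(mol*K) at 298.15 K, consistent with its Cp0 above.
def _nasa_cp(coeffs, T, R=8.314462618):
    a1, a2, a3, a4, a5 = coeffs[:5]
    return R * (a1 + a2 * T + a3 * T ** 2 + a4 * T ** 3 + a5 * T ** 4)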
transitionState(
label = 'TS1',
E0 = (291.23,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
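# Note (ours): the transitionState entries below specify only the barrier
# height E0. With no vibrational modes given, microcanonical rates are
# estimated by inverse Laplace transform of the modified-Arrhenius fits in the
# reaction blocks rather than from RRKM sums of states.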
transitionState(
label = 'TS2',
E0 = (462.221,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (538.699,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (497.951,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (380.338,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (399.474,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (350.103,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (722.113,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (343.259,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (380.132,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (705.575,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (537.022,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (257.971,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (716.337,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (466.494,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (454.469,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (430.619,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (503.849,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (393.718,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (361.682,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (350.103,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (380.132,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (375.044,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (274.66,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (463.915,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (257.971,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS27',
E0 = (714.692,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS28',
E0 = (375.062,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS29',
E0 = (258.055,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS30',
E0 = (257.971,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['CH3CHCCH2(18175)', 'CH3CHCCH2(18175)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(41.5431,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission
Ea raised from 0.0 to 41.5 kJ/mol to match endothermicity of reaction."""),
)
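# Note (illustrative comment, not RMG output): every 'kinetics' entry is a
# modified Arrhenius expression, k(T) = A * (T/T0)^n * exp(-Ea/(R*T)). A
# minimal evaluator (helper name ours); reaction1's parameters give
# k(1000 K) ~ 3.4e10 s^-1:
import math

def _arrhenius(T, A, n, Ea, T0=1.0, R=8.314462618):
    # A in s^-1 for unimolecular steps, Ea in J/mol, T and T0 in K.
    return A * (T / T0) ** n * math.exp(-Ea / (R * T))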
reaction(
label = 'reaction2',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2]C1([CH]C)CC1=CC(25275)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(3.36e+09,'s^-1'), n=0.84, Ea=(212.534,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""Estimated using template [R4_S_D;doublebond_intra_HNd;radadd_intra_cs2H] for rate rule [R4_S_(Cd)_D;doublebond_intra_HNd;radadd_intra_cs2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Exocyclic
Ea raised from 210.2 to 212.5 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction3',
reactants = ['CH3CHCCH2(18175)', 'C=[C][CH]C(18176)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(0.00086947,'m^3/(mol*s)'), n=2.67356, Ea=(32.0272,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ca_Cds-HH;CJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['[CH2]C(=CC)C(C)=[C]C(25412)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(7.74e+09,'s^-1'), n=1.08, Ea=(161.921,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 198 used for R3H_DS;Cd_rad_out_Cs;Cs_H_out_2H
Exact match found for rate rule [R3H_DS;Cd_rad_out_Cs;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction5',
reactants = ['[CH2]C(=[C]C)C(C)=CC(25413)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(111300,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_single;Cs_H_out] for rate rule [R4H_DSS;Cd_rad_out_Cs;Cs_H_out_2H]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction6',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2]C(=CC)[C](C)C=C(24605)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(1.6e+06,'s^-1'), n=1.81, Ea=(149.787,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 101 used for R4H_SDS;C_rad_out_2H;Cs_H_out_2H
Exact match found for rate rule [R4H_SDS;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction7',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2][C](C=C)C(C)=CC(24606)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(6.66e+06,'s^-1'), n=1.64, Ea=(100.416,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 96 used for R5H_SS(D)MS;C_rad_out_2H;Cs_H_out_2H
Exact match found for rate rule [R5H_SS(D)MS;C_rad_out_2H;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['C=[C][CH]C(18176)', 'C=[C][CH]C(18176)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(3.73038e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction9',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2]C(=CC)[C]1CC1C(25414)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(7.36786e+12,'s^-1'), n=-0.105173, Ea=(93.5715,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [R3_D;doublebond_intra;radadd_intra_cs2H] for rate rule [R3_D;doublebond_intra_secDe_HNd;radadd_intra_cs2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction10',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2][C]1C(=CC)CC1C(25415)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(6.43734e+08,'s^-1'), n=0.926191, Ea=(130.445,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R4_S_D;doublebond_intra;radadd_intra_cs2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction11',
reactants = ['CH2(S)(23)', '[CH2]C(=C)C([CH2])=CC(25416)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(7.94e+13,'cm^3/(mol*s)','*|/',0.25), n=-0.324, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 4 used for carbene;Cd_pri
Exact match found for rate rule [carbene;Cd_pri]
Euclidian distance = 0
Multiplied by reaction path degeneracy 4.0
family: 1,2_Insertion_carbene
Ea raised from -3.9 to 0 kJ/mol."""),
)
reaction(
    label = 'reaction12',
reactants = ['C=C([CH]C)C[C]=CC(24184)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(1.74842e+09,'s^-1'), n=1.084, Ea=(170.038,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [cCsCJ;CdsJ;C] + [cCs(-HH)CJ;CJ;C] for rate rule [cCs(-HH)CJ;CdsJ;C]
Euclidian distance = 1.0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction13',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['CC=C1CCC1=CC(25269)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""From training reaction 2 used for R4_SSS;C_rad_out_2H;Cpri_rad_out_2H
Exact match found for rate rule [R4_SSS;C_rad_out_2H;Cpri_rad_out_2H]
Euclidian distance = 0
family: Birad_recombination"""),
)
reaction(
label = 'reaction14',
reactants = ['CH2(19)', '[CH2]C([C]=CC)=CC(25417)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_rad/OneDe;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction15',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2]C1([CH]C)C(=C)C1C(25296)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(6.72658e+10,'s^-1'), n=0.535608, Ea=(216.807,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R4_S_D;doublebond_intra;radadd_intra_csHNd] + [R4_S_D;doublebond_intra_HNd;radadd_intra_cs] for rate rule [R4_S_(Cd)_D;doublebond_intra_HNd;radadd_intra_csHNd]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: Intra_R_Add_Exocyclic
Ea raised from 214.2 to 216.8 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction16',
reactants = ['H(3)', '[CH2]C(=CC)C(=C)C=C(24604)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(2.31e+08,'cm^3/(mol*s)'), n=1.64, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 2544 used for Cds-HH_Cds-CdH;HJ
Exact match found for rate rule [Cds-HH_Cds-CdH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from -2.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction17',
reactants = ['[CH2]CC(=C)C([CH2])=CC(25418)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(1.72e+06,'s^-1'), n=1.99, Ea=(113.805,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 84 used for R2H_S;C_rad_out_2H;Cs_H_out_H/Cd
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cs_H_out_H/Cd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction18',
reactants = ['[CH]=C(CC)C([CH2])=CC(25419)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.846e+10,'s^-1'), n=0.74, Ea=(145.185,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 194 used for R3H_DS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC
Exact match found for rate rule [R3H_DS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction19',
reactants = ['[CH2]C(=[C]C)C(=C)CC(25420)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(74200,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_single;Cs_H_out_1H] for rate rule [R4H_DSS;Cd_rad_out_Cs;Cs_H_out_H/NonDeC]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction20',
reactants = ['[CH]=C([CH]C)C(C)=CC(25421)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(111300,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_singleH;Cs_H_out] for rate rule [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction21',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2][C](C=C)C(=C)CC(24623)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(6.66e+06,'s^-1'), n=1.64, Ea=(100.416,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_SS(D)MS;C_rad_out_single;Cs_H_out_2H] for rate rule [R5H_SS(D)MS;C_rad_out_H/NonDeC;Cs_H_out_2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 6.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction22',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['C[CH][C]1CCC1=CC(25422)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(3.21867e+08,'s^-1'), n=0.926191, Ea=(130.445,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R4_S_D;doublebond_intra;radadd_intra_cs2H]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction23',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['[CH2][C]1C(=C)C(C)C1C(25423)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(5.16207e+08,'s^-1'), n=0.911389, Ea=(125.357,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R4_S_D;doublebond_intra;radadd_intra_csHCs]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction24',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['C=CC(=C)C(C)=CC(24616)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(1.27566e+10,'s^-1'), n=0.137, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad] for rate rule [R5radEndo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 6.0
family: Intra_Disproportionation"""),
)
reaction(
    label = 'reaction25',
reactants = ['C=[C]C(C)C(=C)[CH]C(24183)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(8.66e+11,'s^-1'), n=0.438, Ea=(94.4747,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 5 used for cCs(-HC)CJ;CdsJ;C
Exact match found for rate rule [cCs(-HC)CJ;CdsJ;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction26',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['C=C1C(=CC)CC1C(25265)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(3.24e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_2H;Cpri_rad_out_single] for rate rule [R4_SSS;C_rad_out_2H;Cpri_rad_out_H/NonDeC]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction27',
reactants = ['CHCH3(T)(95)', '[CH2]C([C]=C)=CC(24774)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(1.06732e+06,'m^3/(mol*s)'), n=0.472793, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_rad/OneDe;Birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -3.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction28',
reactants = ['[CH]=C([CH]C)C(=C)CC(25424)'],
products = ['C=C([CH]C)C(=C)[CH]C(24182)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(74200,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_1H] for rate rule [R4H_DSS;Cd_rad_out_singleH;Cs_H_out_H/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction29',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['C=CC(=C)C(=C)CC(24630)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(1.926e+10,'s^-1'), n=0.137, Ea=(8.368,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad_NDe;XH_Rrad] for rate rule [R5radEndo;Y_rad_NDe;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 6.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction30',
reactants = ['C=C([CH]C)C(=C)[CH]C(24182)'],
products = ['C=C1C(=C)C(C)C1C(25274)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Cpri_rad_out_single] for rate rule [R4_SSS;C_rad_out_H/NonDeC;Cpri_rad_out_H/NonDeC]
Euclidian distance = 2.82842712475
family: Birad_recombination"""),
)
network(
label = '4267',
isomers = [
'C=C([CH]C)C(=C)[CH]C(24182)',
],
reactants = [
('CH3CHCCH2(18175)', 'CH3CHCCH2(18175)'),
],
bathGas = {
'N2': 0.5,
'Ne': 0.5,
},
)
pressureDependence(
label = '4267',
Tmin = (300,'K'),
Tmax = (2000,'K'),
Tcount = 8,
Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
Pmin = (0.01,'bar'),
Pmax = (100,'bar'),
Pcount = 5,
Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
maximumGrainSize = (0.5,'kcal/mol'),
minimumGrainCount = 250,
method = 'modified strong collision',
interpolationModel = ('Chebyshev', 6, 4),
activeKRotor = True,
activeJRotor = True,
rmgmode = True,
)
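# Note (illustrative comment, not RMG output): with interpolationModel =
# ('Chebyshev', 6, 4), the fitted k(T,P) surface is
#   log10 k = sum_{i<6, j<4} a_ij * phi_i(Tr) * phi_j(Pr)
# over reduced variables Tr and Pr. A sketch of the evaluation (the helper
# name and the 'coeffs' argument are ours; the actual a_ij are written to the
# job's output file, not this input):
import math

def _cheb_k(T, P, coeffs, Tmin=300.0, Tmax=2000.0, Pmin=1e3, Pmax=1e7):
    # T in K, P in Pa (0.01-100 bar); coeffs is a 6x4 nested list.
    Tr = (2.0 / T - 1.0 / Tmin - 1.0 / Tmax) / (1.0 / Tmax - 1.0 / Tmin)
    Pr = ((2.0 * math.log10(P) - math.log10(Pmin) - math.log10(Pmax)) /
          (math.log10(Pmax) - math.log10(Pmin)))
    phi = lambda k, x: math.cos(k * math.acos(max(-1.0, min(1.0, x))))
    logk = sum(coeffs[i][j] * phi(i, Tr) * phi(j, Pr)
               for i in range(6) for j in range(4))
    return 10.0 ** logk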
#Embedded file name: c:/depot/games/branches/release/EVE-TRANQUILITY/eve/client/script/paperDoll/SkinRaytracing.py
import trinity
import blue
import telemetry
import ctypes
import math
import time
import geo2
import struct
import itertools
import weakref
import uthread
import paperDoll as PD
import log
import random
mylog = log.Channel('optix', 'python')
def LogInfo(text, *args):
for arg in args:
text += ' ' + str(arg)
mylog.Log(text, log.LGINFO)
def LogWarn(text, *args):
    for arg in args:
        text += ' ' + str(arg)
    mylog.Log(text, log.LGWARN)
class SkinRaytracingTools():
__guid__ = 'paperDoll.SkinRaytracingTools'
@staticmethod
def SetOptixMatrixFromTrinity(optix, matrixName, ratio = None):
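        # Compose the clip-space -> world-space transform the OptiX
        # ray-generation program needs: invert projection and view separately,
        # multiply projToView * viewToWorld, and upload it under 'matrixName';
        # the raw view transform is also uploaded as 'viewTransform'.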
proj = trinity.TriProjection()
view = trinity.TriView()
view.transform = trinity.GetViewTransform()
proj.PerspectiveFov(trinity.GetFieldOfView(), trinity.GetAspectRatio() if ratio is None else ratio, trinity.GetFrontClip(), trinity.GetBackClip())
projToView = geo2.MatrixInverse(proj.transform)
viewToWorld = geo2.MatrixInverse(view.transform)
projToWorld = geo2.MatrixMultiply(projToView, viewToWorld)
r0 = projToWorld[0]
r1 = projToWorld[1]
r2 = projToWorld[2]
r3 = projToWorld[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4(matrixName, mat)
r0 = view.transform[0]
r1 = view.transform[1]
r2 = view.transform[2]
r3 = view.transform[3]
mat = trinity.TriMatrix(r0[0], r0[1], r0[2], r0[3], r1[0], r1[1], r1[2], r1[3], r2[0], r2[1], r2[2], r2[3], r3[0], r3[1], r3[2], r3[3])
optix.SetMatrix4x4('viewTransform', mat)
return mat
@staticmethod
def CreateSamplerForTexture(name, map, waitForFinish):
rt = trinity.Tr2RenderTarget(map.width, map.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
job = trinity.CreateRenderJob()
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.SetStdRndStates(trinity.RM_FULLSCREEN)
job.RenderTexture(map)
job.PopDepthStencil()
job.PopRenderTarget()
job.ScheduleOnce()
if waitForFinish:
job.WaitForFinish()
sampler = trinity.Tr2OptixTextureSampler()
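# The hardcoded True/False branches here and elsewhere in this file look like
# debug switches preserved by the decompiler; the enabled path below copies
# the render target into a texture resource before wrapping it in a sampler.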
if True:
res = trinity.TriTextureRes()
res.CreateAndCopyFromRenderTarget(rt)
sampler.CreateFromTexture(res)
else:
sampler.CreateFromRenderTarget(rt)
sampler.SetNormalizedIndexingMode(True)
if True:
return (sampler, res)
else:
return (sampler, rt)
@staticmethod
def ConvertCubeToTextures(cube):
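# Unroll a cube map into six 2D textures by rendering the background nebula
# once per face with a 90-degree FOV camera aimed down each axis; cube
# resources cannot be fed to the OptiX samplers directly (see InteropTexture).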
names = ['PX',
'NX',
'PY',
'NY',
'PZ',
'NZ']
viewVec = [(1, 0, 0),
(-1, 0, 0),
(0, 1, 0),
(0, -1, 0),
(0, 0, 1),
(0, 0, -1)]
upVec = [(0, 1, 0),
(0, 1, 0),
(0, 0, 1),
(0, 0, -1),
(0, 1, 0),
(0, 1, 0)]
spaceScene = trinity.EveSpaceScene()
spaceScene.envMap1ResPath = str(cube.resourcePath)
spaceScene.envMapScaling = (1, 1, -1)
spaceScene.backgroundRenderingEnabled = True
spaceScene.backgroundEffect = trinity.Load('res:/dx9/scene/starfield/bakeNebula.red')
blue.resMan.Wait()
node = PD.FindParameterByName(spaceScene.backgroundEffect, 'NebulaBrightness')
if node is None:
node = trinity.Tr2FloatParameter()
node.name = 'NebulaBrightness'
spaceScene.backgroundEffect.parameters.append(node)
if node is not None:
node.value = 100
node = PD.FindResourceByName(spaceScene.backgroundEffect, 'NebulaMap')
if node is None:
node = trinity.TriTexture2DParam()
node.name = 'NebulaMap'
spaceScene.backgroundEffect.resources.append(node)
node.SetResource(cube.resource)
blue.resMan.Wait()
mipmapped = []
useTexture = True
for i in xrange(len(names)):
name = names[i]
rt = PD.SkinLightmapRenderer.CreateRenderTarget(cube.resource.width, cube.resource.height, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM, useRT=True)
job = trinity.CreateRenderJob(name=name)
job.PushRenderTarget(rt)
job.PushDepthStencil(None)
job.Clear([(1, 0, 0),
(0.2, 0, 0),
(0, 1, 0),
(0, 0.2, 0),
(0, 0, 1),
(0, 0, 0.2)][i], None)
proj = trinity.TriProjection()
proj.PerspectiveFov(math.pi * 0.5, 1, 0.1, 1000)
view = trinity.TriView()
view.SetLookAtPosition((0, 0, 0), viewVec[i], upVec[i])
viewport = trinity.TriViewport(0, 0, cube.resource.width, cube.resource.height, 0.0, 1.0)
job.SetView(view)
job.SetProjection(proj)
job.SetViewport(viewport)
job.Update(spaceScene)
job.RenderScene(spaceScene)
job.PopDepthStencil()
job.PopRenderTarget()
if useTexture:
tex = trinity.TriTextureRes(cube.resource.width, cube.resource.height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
if True:
job.ScheduleOnce()
job.WaitForFinish()
if useTexture:
mipmapped.append(tex)
else:
mipmapped.append(rt)
else:
job.ScheduleRecurring()
return (mipmapped, names)
@staticmethod
def FindAllTextureResourcesFromEffect(effect, scope):
textures = {}
samplers = []
cubemaps = []
if effect is not None:
for r in effect.resources:
if type(r) == trinity.TriTexture2DParameter and r.resource is not None:
textures[r.name] = r.resource
elif type(r) == trinity.TriTextureCubeParameter and r.resource is not None:
if r.name in cubemaps:
continue
LogInfo('', r.name, ': Converting to individual textures')
cubemaps.append(r.name)
mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(r)
for i in range(len(names)):
if i < len(mipmaps):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(mipmaps[i])
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(r.name + names[i], sampler)
LogInfo('No-Copy Cube Side Interop for ' + r.name + names[i])
samplers.append(mipmaps[i])
samplers.append(sampler)
return (textures, samplers)
@staticmethod
def FindAllTextureResources(dynamic, scope):
textures = {}
samplers = []
cubemaps = []
def ProcessMesh(mesh):
for area in itertools.chain(mesh.opaqueAreas, mesh.decalAreas, mesh.transparentAreas):
newTextures, newSamplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(area.effect, scope)
textures.update(newTextures)
samplers.extend(newSamplers)
if type(dynamic) == trinity.Tr2IntSkinnedObject:
for mesh in dynamic.visualModel.meshes:
ProcessMesh(mesh)
elif type(dynamic) == trinity.EveShip2:
ProcessMesh(dynamic.highDetailMesh.object)
elif type(dynamic) == trinity.EveStation2:
ProcessMesh(dynamic.highDetailMesh.object)
return (textures, samplers)
@staticmethod
def InteropTexture(name, texture, waitForFinish, scope):
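# Zero-copy interop only works for B8G8R8A8 textures; other formats fall back
# to a render-target copy via CreateSamplerForTexture, and cube textures are
# skipped.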
if texture.format == trinity.PIXEL_FORMAT.B8G8R8A8_UNORM:
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(texture)
sampler.SetNormalizedIndexingMode(True)
scope.SetSampler(name, sampler)
LogInfo('No-Copy Interop for', name)
return (sampler, None)
if texture.type == trinity.TRIRTYPE_CUBETEXTURE:
LogInfo('Copy-Interop for cubes not supported, skipping', name)
return
sampler_rt = SkinRaytracingTools.CreateSamplerForTexture(name, texture, waitForFinish)
if sampler_rt is None or len(sampler_rt) < 1:
LogInfo('InteropTexture failed for', name)
else:
scope.SetSampler(name, sampler_rt[0])
LogInfo('Interop for', name)
return sampler_rt
@staticmethod
def InteropAllTexturesFromEffect(optix, effect, waitForFinish, nameTranslation = None, scope = None, cache = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResourcesFromEffect(effect, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
if cache is not None and texture in cache:
sampler = cache[texture]
scope.SetSampler(name, sampler[0])
LogInfo('Interop cache for', name)
else:
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler and cache is not None:
cache[texture] = sampler
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def InteropAllTextures(optix, dynamic, waitForFinish, nameTranslation = None, scope = None):
if scope is None:
scope = optix
textures, samplers = SkinRaytracingTools.FindAllTextureResources(dynamic, scope)
for name, texture in textures.iteritems():
if 'spotlight' in name.lower():
continue
if nameTranslation is not None:
name = nameTranslation.get(name, name)
sampler = SkinRaytracingTools.InteropTexture(name, texture, waitForFinish, scope)
if sampler is not None:
samplers.append(sampler)
return samplers
@staticmethod
def SafeLinearize(values):
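# Approximate sRGB-to-linear conversion (gamma 2.2) that normalizes by the
# peak channel first so HDR values above 1.0 are not distorted by the pow().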
peak = max(1, max(values[0], max(values[1], values[2])))
return (peak * math.pow(values[0] / peak, 2.2),
peak * math.pow(values[1] / peak, 2.2),
peak * math.pow(values[2] / peak, 2.2),
values[3])
@staticmethod
def CopyParametersToContext(effect, instance, linearNames = None):
for p in effect.parameters:
if type(p) is trinity.Tr2Vector4Parameter:
value = SkinRaytracingTools.SafeLinearize(p.value) if linearNames is not None and p.name in linearNames else p.value
instance.SetFloat4(p.name, value[0], value[1], value[2], value[3])
elif type(p) is trinity.TriFloatParameter or type(p) is trinity.Tr2FloatParameter:
instance.SetFloat4(p.name, p.value, 0, 0, 0)
@staticmethod
def CreateBufferForLights(lights, leaveEmpty = False, preserveAlpha = False):
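# Pack each trinity light into one 64-byte record (16 floats: position +
# radius, gamma-linearized colour + falloff (or alpha when preserveAlpha is
# set), cone direction, cone angles, padding) for the OptiX light buffer.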
bufEveLights = trinity.Tr2OptixBuffer()
bufEveLights.CreateUserData(64, len(lights), trinity.OPTIX_BUFFER_OUTPUT, False)
bufEveLights.MapUser()
buffer = ''
if leaveEmpty:
lights = []
for light in lights:
innerAngle = light.coneAlphaInner
outerAngle = light.coneAlphaOuter
if innerAngle + 1.0 > outerAngle:
innerAngle = outerAngle - 1.0
innerAngle = math.cos(innerAngle * 3.1415927 / 180.0)
outerAngle = math.cos(outerAngle * 3.1415927 / 180.0)
coneDir = geo2.Vec3Normalize((light.coneDirection[0], light.coneDirection[1], light.coneDirection[2]))
buffer += struct.pack('16f', light.position[0], light.position[1], light.position[2], light.radius, math.pow(light.color[0], 2.2), math.pow(light.color[1], 2.2), math.pow(light.color[2], 2.2), light.falloff if not preserveAlpha else light.color[3], coneDir[0], coneDir[1], coneDir[2], outerAngle, innerAngle, 0, 0, 0)
bufEveLights.SetUserDataFromStruct(buffer)
bufEveLights.UnmapUser()
return bufEveLights
@staticmethod
def CreateUInt1Buffer(optix, name):
buffer = trinity.Tr2OptixBuffer()
buffer.CreateUInt1(1, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
buffer.Map()
buffer.SetUserDataI(0, 0)
buffer.Unmap()
optix.SetBuffer(name, buffer)
return buffer
@staticmethod
def matEqual(m1, m2):
return m1._11 == m2._11 and m1._12 == m2._12 and m1._13 == m2._13 and m1._14 == m2._14 and m1._21 == m2._21 and m1._22 == m2._22 and m1._23 == m2._23 and m1._24 == m2._24 and m1._31 == m2._31 and m1._32 == m2._32 and m1._33 == m2._33 and m1._34 == m2._34 and m1._41 == m2._41 and m1._42 == m2._42 and m1._43 == m2._43 and m1._44 == m2._44
@staticmethod
def FuncWrapper(weakSelf, func):
if weakSelf():
func(weakSelf())
class OitHelper():
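# Order-independent transparency state: a one-element atomic allocator
# counter plus a fixed pool of 1048576 fragment nodes (64+112 bytes each)
# that the OIT any-hit programs (e.g. any_hit_HairOIT) appear to draw from.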
def __init__(self, optix):
self.oitAllocatorBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'oit_allocator')
oitPoolBuffer = trinity.Tr2OptixBuffer()
oitPoolBuffer.CreateUserData(64 + 112, 1048576, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('oit_pool', oitPoolBuffer)
self.oitPoolBuffer = oitPoolBuffer
def ResetAllocationCount(self):
self.oitAllocatorBuffer.Map()
self.oitAllocatorBuffer.SetUserDataI(0, 0)
self.oitAllocatorBuffer.Unmap()
def GetAllocationCount(self):
self.oitAllocatorBuffer.Map()
count = self.oitAllocatorBuffer.GetUserDataI(0)
self.oitAllocatorBuffer.Unmap()
return count
class RayCountHelper():
def __init__(self, optix):
self.rayCountBuffer = SkinRaytracingTools.CreateUInt1Buffer(optix, 'ray_count')
def ResetCount(self):
self.rayCountBuffer.Map()
self.rayCountBuffer.SetUserDataI(0, 0)
self.rayCountBuffer.Unmap()
def GetCount(self):
self.rayCountBuffer.Map()
count = self.rayCountBuffer.GetUserDataI(0)
self.rayCountBuffer.Unmap()
return count
class CaptureHelper():
def __init__(self, width, height):
self.capture = trinity.Tr2RenderTarget(width, height, 1, trinity.PIXEL_FORMAT.B8G8R8A8_UNORM)
def SaveSurfaceToFile(self, filename):
trinity.SaveRenderTarget(filename, self.capture)
LogInfo('Saved to', filename)
def CreateRenderSteps(self, rj, blitfx):
rj.PushRenderTarget(self.capture).name = 'Begin screenshot capture'
rj.PushDepthStencil(None).name = ' push depth'
rj.RenderEffect(blitfx).name = ' Blit to screenshot'
rj.PopDepthStencil().name = ' pop depth'
rj.PopRenderTarget().name = 'End screenshot capture'
class FullScreenBlitter():
def __init__(self, width, height):
self.effect = trinity.Tr2Effect()
self.effect.effectFilePath = 'res:/graphics/effect/optix/shaders/gammaBlit.fx'
if self.effect.effectResource is None:
LogWarn('Failed to load effect 1')
return
self.highpassEffect = trinity.Tr2Effect()
self.highpassEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/highpassFilter.fx'
if self.highpassEffect.effectResource is None:
LogWarn('Failed to load effect 2')
return
self.gaussianHorizEffect = trinity.Tr2Effect()
self.gaussianHorizEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianHorizEffect.effectResource is None:
LogWarn('Failed to load effect 3')
return
self.gaussianVertEffect = trinity.Tr2Effect()
self.gaussianVertEffect.effectFilePath = 'res:/graphics/effect/optix/shaders/gaussianBlur.fx'
if self.gaussianVertEffect.effectResource is None:
LogWarn('Failed to load effect 4')
return
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
while effect.effectResource.isLoading:
PD.Yield()
self.blitcolor = trinity.Tr2Vector4Parameter()
self.blitcolor.name = 'Color'
for effect in [self.effect,
self.highpassEffect,
self.gaussianHorizEffect,
self.gaussianVertEffect]:
effect.PopulateParameters()
effect.RebuildCachedData()
effect.parameters.append(self.blitcolor)
sizesParam = trinity.Tr2Vector4Parameter()
sizesParam.name = 'InvSize'
sizesParam.value = (1.0 / width,
1.0 / height,
0,
0)
for effect in [self.effect, self.highpassEffect]:
effect.parameters.append(sizesParam)
sizesHorizParam = trinity.Tr2Vector4Parameter()
sizesHorizParam.name = 'invTexelSize'
sizesHorizParam.value = (1.0 / width,
0.0,
0,
0)
self.gaussianHorizEffect.parameters.append(sizesHorizParam)
sizesVertParam = trinity.Tr2Vector4Parameter()
sizesVertParam.name = 'invTexelSize'
sizesVertParam.value = (0.0,
1.0 / height,
0,
0)
self.gaussianVertEffect.parameters.append(sizesVertParam)
def SetTexture(self, optixOutputTexture, highpassTexture, filteredTexture):
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(optixOutputTexture)
for effect in [self.effect, self.highpassEffect]:
effect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(highpassTexture)
self.gaussianHorizEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'Texture'
tex.SetResource(filteredTexture)
self.gaussianVertEffect.resources.append(tex)
tex = trinity.TriTexture2DParameter()
tex.name = 'BloomTexture'
tex.SetResource(highpassTexture)
self.effect.resources.append(tex)
def UpdateFrameCount(self, framecount):
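# Scale the accumulated HDR sum by 1/framecount at blit time, turning the
# running total into a progressive average.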
invFC = 1.0 / framecount if framecount > 0 else 1.0
self.blitcolor.value = (invFC,
invFC,
invFC,
invFC)
class FullOptixRenderer():
__guid__ = 'paperDoll.FullOptixRenderer'
instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
def GetFrameCount(self):
return self.framecount
def SaveScreenshot(self, filename):
self.capture.SaveSurfaceToFile(filename)
def AddRenderPreviewStep(self, renderJob):
renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
renderJob.PushDepthStencil(None).name = ' [optix] push depth'
renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
renderJob.PopDepthStencil().name = ' [optix] pop depth'
def RefreshMatrices(self):
model = self.skinnedObject
self.optix.RefreshMatrices(model, self.skinnedOptix)
self.RunSkinningAndTesselation()
self.ApplySettings()
print 'Refreshed'
@staticmethod
def RaytraceFrame(selfRef):
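# Per-frame driver: a changed clip-to-world matrix resets the accumulation
# buffer and re-derives depth of field from the eyeball bone distances;
# otherwise samples keep accumulating into the same buffer.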
start = time.time()
VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
selfRef.previousVP = VP
selfRef.outputBuffer.Clear()
selfRef.framecount = 0
model = selfRef.skinnedObject
pos1 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballLeft'))
pos2 = model.GetBonePosition(model.GetBoneIndex('fj_eyeballRight'))
dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
autodof = min(dist1, dist2)
dof = selfRef.settings.get('lens_focal_distance', autodof)
print 'Auto-depth-of-field is at', autodof, ', actual focal distance is', dof
selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
else:
selfRef.framecount += 1
selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
selfRef.oit.ResetAllocationCount()
selfRef.rayCounter.ResetCount()
time1 = time.time()
selfRef.optix.Run(0, selfRef.width, selfRef.height)
time2 = time.time()
sec = time2 - time1
raycount = selfRef.rayCounter.GetCount()
raysec = 0
if sec > 0:
raysec = raycount / float(sec)
time3 = time.time()
if selfRef.framecount % 32 == 0:
stop = time.time()
print selfRef.oit.GetAllocationCount(), 'oit allocations'
selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
print 'time %05.3f / %05.3f / %05.3f / %05.3f msec' % (float(time1 - start) * 1000,
float(time2 - time1) * 1000,
float(time3 - time2) * 1000,
float(stop - time3) * 1000),
print '%d rays in %05.3f ms / %10d Krays/sec / %d rays per pixel' % (raycount,
sec * 1000,
raysec / 1000,
selfRef.framecount)
@telemetry.ZONE_METHOD
def OnBeforeOptixPositionsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxPosWorldUVEffect', self.oxMeshes)
if self.skinnedObject is not None and self.skinnedObject.visualModel is not None:
self.savedMeshes = self.skinnedObject.visualModel.meshes[:]
filteredMeshes = [ ref.object for ref in self.oxMeshes.iterkeys() if ref.object is not None ]
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, filteredMeshes)
self.scene.filterList.removeAt(-1)
self.scene.filterList.append(self.skinnedObject)
self.scene.useFilterList = True
@telemetry.ZONE_METHOD
def OnBeforeOptixNormalsUV(self):
PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', self.oxMeshes)
def OnAfterOptix(self):
PD.SkinLightmapRenderer.DoRestoreShaders(meshes=self.oxMeshes)
PD.SkinLightmapRenderer.DoCopyMeshesToVisual(self.skinnedObject, self.savedMeshes)
del self.savedMeshes
self.scene.useFilterList = False
self.scene.filterList.removeAt(-1)
def _InitUVUnwrap(self):
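# Bake world-space positions, world-space normals and a stretch map into UV
# space for every scattering mesh; these are bound later as
# world_pos_uv_buffer, world_normal_uv_buffer and stretchmap_buffer for the
# skin shaders to sample.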
self.oxMeshes = {}
self.scatterFX = set()
self.unwrapSize = 1024
posUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_POSWORLD_UV_EFFECT)
normalUV = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.OPTIX_NORMALWORLD_UV_EFFECT)
deriv = PD.SkinLightmapRenderer.PreloadEffect(PD.SkinLightmapRenderer.STRETCHMAP_RENDERER_EFFECT)
self.oxDepth = trinity.Tr2DepthStencil(self.unwrapSize, self.unwrapSize, trinity.DEPTH_STENCIL_FORMAT.D24S8, 1, 0)
for mesh in self.skinnedObject.visualModel.meshes:
if PD.SkinLightmapRenderer.IsScattering(mesh):
m = PD.SkinLightmapRenderer.Mesh()
m.ExtractOrigEffect(mesh)
m.CreateOptixEffects(includeStretchMap=True)
PD.AddWeakBlue(self, 'oxMeshes', mesh, m)
fx = PD.GetEffectsFromMesh(mesh)
for f in fx:
self.scatterFX.add(f)
self.oxWorldPosMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.oxWorldNormalMapUV = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize, self.unwrapSize, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
self.stretchMap = PD.SkinLightmapRenderer.CreateRenderTarget(self.unwrapSize / 2, self.unwrapSize / 2, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT, useRT=True)
rj = trinity.CreateRenderJob('Optix UV Unwrap')
rj.PushRenderTarget(self.oxWorldPosMapUV)
rj.PushDepthStencil(self.oxDepth)
rj.Clear((0, 0, 0, 0), 1.0)
rj.SetStdRndStates(trinity.RM_FULLSCREEN)
vp = trinity.TriViewport()
vp.x = 0
vp.y = 0
vp.width = self.unwrapSize
vp.height = self.unwrapSize
rj.SetViewport(vp)
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnBeforeOptixPositionsUV, 'onBeforeOptixPositionsUV', rj)
rj.RenderScene(self.scene).name = 'Optix WorldPos (UV space)'
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('oxNormalWorldUVEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.SetRenderTarget(self.oxWorldNormalMapUV)
rj.Clear((0, 0, 0, 0), 1.0)
rj.RenderScene(self.scene).name = 'Optix Normals (UV space)'
rj.SetRenderTarget(self.stretchMap)
rj.Clear((0, 0, 0, 0), 1.0)
vp2 = trinity.TriViewport()
vp2.x = 0
vp2.y = 0
vp2.width = self.unwrapSize / 2
vp2.height = self.unwrapSize / 2
rj.SetViewport(vp2)
PD.SkinLightmapRenderer.AddCallback(self, lambda weakSelf: PD.SkinLightmapRenderer.DoChangeEffect('stretchmapRenderEffect', meshes=weakSelf.oxMeshes), '', rj)
rj.RenderScene(self.scene).name = 'Stretchmap'
PD.SkinLightmapRenderer.AddCallback(self, FullOptixRenderer.OnAfterOptix, 'onAfterOptix', rj)
rj.PopRenderTarget()
rj.PopDepthStencil()
rj.ScheduleOnce()
rj.WaitForFinish()
if False:
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldPosMapUV, 'c:/depot/oxworldposuv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.oxWorldNormalMapUV, 'c:/depot/oxworldnormaluv2.dds', isRT=True)
PD.SkinLightmapRenderer.SaveTarget(self.stretchMap, 'c:/depot/stretchmap2.dds', isRT=True)
print '** MAPS SAVED **'
def RunSkinningAndTesselation(self):
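# Run the CUDA skinning kernels once per vertex stride (72- and 64-byte
# layouts), writing skinned vertices into fresh buffers; batches using
# skinnedavatarhair_detailed.fx are tessellated 1 triangle -> 4 on the way.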
print '*** Tesselation phase ***'
batchTypes = self.skinnedOptix[0]
optix = self.optix
ptx = {}
ptx[72] = self.path + 'eve_skinning_kernel72.ptx'
ptx[64] = self.path + 'eve_skinning_kernel64.ptx'
for bytes, ptxfile in ptx.iteritems():
LogInfo('Processing ', bytes, 'bytes/vertex')
skinningProgram = trinity.Tr2OptixProgram(ptxfile, 'kernel_no_tesselation')
skinningProgramTesselate = trinity.Tr2OptixProgram(ptxfile, 'kernel_tesselation')
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, skinningProgram)
optix.SetRayGenerationProgram(1, skinningProgramTesselate)
for batchType in range(len(batchTypes)):
batches = batchTypes[batchType]
out = []
def needsTesselation(fx):
return 'skinnedavatarhair_detailed.fx' in fx.effectFilePath.lower()
for batch in batches:
if 'furshell' in batch[1].effectFilePath.lower():
out.append(None)
continue
tesselate = needsTesselation(batch[1])
triangle_count = batch[6]
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
out.append(None)
continue
vertex_buffer_output = trinity.Tr2OptixBuffer()
vertex_buffer_output.CreateUserData(bytes_per_vertex, triangle_count * 3 * 4 if tesselate else triangle_count * 3, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
out.append(vertex_buffer_output)
for i, batch in enumerate(batches):
if 'furshell' in batch[1].effectFilePath.lower():
continue
triangle_count = batch[6]
tesselate = needsTesselation(batch[1])
bytes_per_vertex = batch[8]
if bytes_per_vertex != bytes:
continue
if tesselate:
LogInfo('Tesselating geometry ', batch, ' of type ', batchType)
else:
LogInfo('Skinning geometry ', batch, ' of type ', batchType)
optix.SetBuffer('vertex_buffer', batch[2])
optix.SetBuffer('index_buffer', batch[3])
optix.SetBuffer('vertex_buffer_output', out[i])
optix.SetUInt('first_index_index', batch[5])
optix.SetBuffer('matrix_buffer', batch[7])
program = int(tesselate)
optix.Run(program, triangle_count, 1)
batch[0].SetBuffer('vertex_buffer', out[i])
if tesselate:
batch[0].SetPrimitiveCount(triangle_count * 4)
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetRayGenerationProgram(1, self.raygen)
def RemoveBadGeometry(self, model):
self.haveBeard = False
self.beardFx = None
for mesh in model.visualModel.meshes:
for area in mesh.decalAreas:
if PD.IsBeard(area):
self.haveBeard = True
self.beardFx = area.effect
area.debugIsHidden = True
break
for mesh in model.visualModel.meshes:
for area in mesh.transparentAreas:
lname = area.name.lower()
if lname.startswith('eyeshadow_'):
mesh.transparentAreas.removeAt(-1)
break
if False:
for mesh in model.visualModel.meshes:
for area in mesh.opaqueAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.opaqueAreas.removeAt(-1)
break
for area in mesh.transparentAreas:
lname = area.name.lower()
if 'eye' not in lname or 'eyewet' in lname or 'eyelash' in lname:
mesh.transparentAreas.removeAt(-1)
break
if False:
print 'raytracing', len(model.visualModel.meshes), 'meshes'
for mesh in model.visualModel.meshes:
lname = mesh.name.lower()
if not lname.startswith('hair'):
print 'removing', lname
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
elif False:
print 'removing', lname
for a in mesh.opaqueAreas:
print 'opaque', a.name
for a in mesh.decalAreas:
print 'decal', a.name
for a in mesh.transparentAreas:
print 'transp', a.name
mesh.opaqueAreas.removeAt(-1)
mesh.decalAreas.removeAt(-1)
mesh.transparentAreas.removeAt(-1)
else:
print 'keeping', lname
def TransferBeardParameters(self, optix):
if self.haveBeard:
LogInfo('Beard found')
beardLength = self.settings['beardLength']
optix.SetFloat3('beardOptions', beardLength[0], beardLength[1], self.settings['beardGravity'])
floatMap = {'FurLength': 'beard_fur_length',
'UVScale': 'beard_uv_scale',
'AlphaMultiplier': 'beard_alpha_multiplier',
'CombStrength': 'beard_comb_strength',
'FurGrainRotation': 'beard_fur_grain_rotation',
'MirrorGrain': 'beard_mirror_grain',
'FurParallax': 'beard_fur_parallax'}
float3Map = {'gravityOffset': 'beard_gravity_offset',
'MaterialDiffuseColor': 'beard_diffuse_color'}
for param in self.beardFx.parameters:
optixName = floatMap.get(param.name, None)
if optixName is not None:
optix.SetFloat(optixName, param.value)
else:
optixName = float3Map.get(param.name, None)
if optixName is not None:
optix.SetFloat3(optixName, param.value[0], param.value[1], param.value[2])
def GenerateBeardGeometry(self, optix, path, any_hit_shadow):
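# GPU-generated beard: a raygen kernel writes 512x512 spline records (80
# bytes each) into a buffer that a bezier-curve intersection program then
# consumes as custom OptiX geometry, pre-subdivided 4x for tighter bounds.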
if not self.haveBeard:
return None
LogInfo('generating beard splines')
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld')
beardProgram = trinity.Tr2OptixProgram(path + 'eve_beard_kernel.ptx', 'kernel')
curveOutputBuffer = trinity.Tr2OptixBuffer()
curveCount = 512
curveOutputBuffer.CreateUserData(80, curveCount * curveCount, trinity.OPTIX_BUFFER_INPUT_OUTPUT, True)
optix.SetBuffer('output', curveOutputBuffer)
rayTypeCount = optix.GetRayTypeCount()
optix.SetRayTypeCount(1)
optix.SetEntryPointCount(2)
optix.SetRayGenerationProgram(0, beardProgram)
optix.SetRayGenerationProgram(1, beardProgram)
optix.SetEntryPointCount(1)
LogInfo('beard: about to Run')
optix.Run(0, curveCount, curveCount)
LogInfo('beard: Run done')
optix.SetRayTypeCount(rayTypeCount)
hairGeometry = trinity.Tr2OptixGeometry()
hairGeometry.InitializeFromProgram(path + 'bezier_curves.ptx', 'intersect', 'bounds')
subdivideDepth = 2
hairGeometry.SetPrimitiveCount(curveCount * curveCount * (1 << subdivideDepth))
optix.SetUInt('presubdivide_depth', subdivideDepth)
optix.SetBuffer('curves', curveOutputBuffer)
LogInfo('beard: geometry setup done')
beardInstance = trinity.Tr2OptixGeometryInstance()
beardInstance.SetGeometry(hairGeometry)
closest_hit_BeardShader = trinity.Tr2OptixProgram(path + 'eve_beard_shader.ptx', 'closest_hit_BeardShader')
beardMaterial = trinity.Tr2OptixMaterial()
beardMaterial.SetClosestHit(0, closest_hit_BeardShader)
beardMaterial.SetAnyHit(1, any_hit_shadow)
beardInstance.SetMaterial(beardMaterial)
LogInfo('beard: geometry instance setup done')
return beardInstance
def _DoInit(self, scene = None):
model = None
if scene is None:
scene = PD.SkinLightmapRenderer.Scene()
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
for dynamic in scene.dynamics:
if dynamic.__typename__ == 'Tr2IntSkinnedObject':
model = dynamic
break
else:
LogWarn('No Tr2IntSkinnedObject found')
return
if model is None:
LogWarn('No Tr2IntSkinnedObject found')
return
self.skinnedObject = model
if self.skinnedObject.visualModel is None:
LogWarn('skinnedObject has no visualMeshes')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
self.RemoveBadGeometry(model)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
self._InitUVUnwrap()
for steps in trinity.renderJobs.recurring:
if steps.name == 'FullOptixRenderer':
steps.UnscheduleRecurring()
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetUInt('radiance_ray_type', 0)
optix.SetUInt('shadow_ray_type', 1)
optix.SetUInt('translucency_ray_type', 2)
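# NOTE: the call below overwrites 'translucency_ray_type' set just above;
# ray 3 is wired up as the bounce/feeler ray elsewhere in this file, so a
# different variable name was likely intended (decompilation artifact).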
optix.SetUInt('translucency_ray_type', 3)
optix.SetFloat('scene_epsilon', 0.001)
optix.SetUInt('frameIteration', 0)
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/NCC/'))
self.path = path
LogInfo('Getting files from', path)
everything = []
any_hit_shadow = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow')
any_hit_shadow_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_shadow_blend')
shader_diffuse_only_feeler = trinity.Tr2OptixProgram(path + 'eve_bounce.ptx', 'closest_hit_DiffuseOnlyFeeler2')
any_hit_cutout = trinity.Tr2OptixProgram(path + 'eve_cutout.ptx', 'any_hit_CutoutMask')
any_hit_diffuse_feeler_blend = trinity.Tr2OptixProgram(path + 'eve_shadow.ptx', 'any_hit_diffuse_feeler_blend')
everything.append(any_hit_shadow)
everything.append(any_hit_shadow_blend)
everything.append(shader_diffuse_only_feeler)
everything.append(any_hit_cutout)
mainRay = 0
shadowRay = 1
bounceRay = 3
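# Material helpers: route the radiance ray to the given closest-hit shader,
# shadow rays to the opaque any-hit, and bounce rays to the diffuse feeler;
# MakeDecal swaps in alpha-tested any-hit programs on all three ray types.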
def MakeMaterialWithShader(shader):
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(mainRay, shader)
material.SetAnyHit(shadowRay, any_hit_shadow)
material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(material)
return (material, shader)
def MakeMaterial(ptxFile, shaderName):
shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
everything.append(shader)
return MakeMaterialWithShader(shader)
def MakeDecal(material):
material.SetAnyHit(mainRay, any_hit_cutout)
material.SetAnyHit(shadowRay, any_hit_shadow_blend)
material.SetAnyHit(bounceRay, any_hit_cutout)
skin_single_material, skin_single_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single2')
skin_single_material_scatter = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Single_Scatter2')[0]
skin_single_material_decal = MakeMaterialWithShader(skin_single_shade)[0]
MakeDecal(skin_single_material_decal)
glasses_shade = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shade')
glasses_shadow = trinity.Tr2OptixProgram(path + 'eve_glasses.ptx', 'glasses_shadow')
glass_material = trinity.Tr2OptixMaterial()
glass_material.SetAnyHit(mainRay, glasses_shade)
glass_material.SetAnyHit(shadowRay, glasses_shadow)
glass_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(glasses_shade)
everything.append(glasses_shadow)
vizNames = ['closest_hit_VizNormal',
'closest_hit_VizUV',
'closest_hit_VizConstantColor',
'closest_hit_VizDiffuse']
vizualizer, vizualizer_shade = MakeMaterial('eve_basic', vizNames[0])
vizualizer_decal = MakeMaterialWithShader(vizualizer_shade)[0]
MakeDecal(vizualizer_decal)
skin_double_material, skin_double_shade = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2')
skin_double_material_decal = MakeMaterialWithShader(skin_double_shade)[0]
MakeDecal(skin_double_material_decal)
skin_double_material_transparent = MakeMaterial('eve_skin', 'closest_hit_ShadeSinglePassSkin_Double2_Blend')[0]
skin_double_material_transparent.SetAnyHit(mainRay, any_hit_cutout)
skin_double_material_transparent.SetAnyHit(shadowRay, any_hit_shadow_blend)
skin_double_material_transparent.SetAnyHit(bounceRay, any_hit_cutout)
avatar_brdf_material, avatar_brdf_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Single2')
avatar_brdf_material_decal = MakeMaterialWithShader(avatar_brdf_shade)[0]
MakeDecal(avatar_brdf_material_decal)
avatar_brdf_double_material, avatar_brdf_double_shade = MakeMaterial('eve_brdf', 'closest_hit_ShadeAvatarBRDF_Double2')
avatar_brdf_double_material_decal = MakeMaterialWithShader(avatar_brdf_double_shade)[0]
MakeDecal(avatar_brdf_double_material_decal)
avatar_hair_material = trinity.Tr2OptixMaterial()
avatar_hair_shade = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'closest_hit_ShadeAvatarHair2' if self.useOIT else 'closest_hit_ShadeAvatarHair2_Blend')
avatar_hair_material.SetClosestHit(mainRay, avatar_hair_shade)
if self.useOIT:
avatar_hair_oit = trinity.Tr2OptixProgram(path + 'eve_hair.ptx', 'any_hit_HairOIT')
avatar_hair_material.SetAnyHit(mainRay, avatar_hair_oit)
avatar_hair_material.SetAnyHit(shadowRay, any_hit_shadow_blend)
avatar_hair_material.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
everything.append(avatar_hair_shade)
everything.append(avatar_hair_material)
avatar_hair_material_decal = trinity.Tr2OptixMaterial()
avatar_hair_material_decal.SetClosestHit(mainRay, avatar_hair_shade)
avatar_hair_material_decal.SetAnyHit(mainRay, avatar_hair_oit if self.useOIT else any_hit_cutout)
avatar_hair_material_decal.SetAnyHit(shadowRay, any_hit_shadow_blend)
avatar_hair_material_decal.SetClosestHit(bounceRay, shader_diffuse_only_feeler)
avatar_hair_material_decal.SetAnyHit(bounceRay, any_hit_cutout)
everything.append(avatar_hair_material_decal)
eye_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEye')
eye_material = trinity.Tr2OptixMaterial()
eye_material.SetClosestHit(mainRay, eye_shade)
eye_material.SetAnyHit(shadowRay, any_hit_shadow)
everything.append(eye_shade)
everything.append(eye_material)
eye_wetness_shade = trinity.Tr2OptixProgram(path + 'eve_eyes.ptx', 'closest_hit_ShadeEyeWetness')
eye_wetness_material = trinity.Tr2OptixMaterial()
eye_wetness_material.SetClosestHit(mainRay, eye_wetness_shade)
eye_wetness_material.SetAnyHit(shadowRay, any_hit_shadow)
everything.append(eye_wetness_shade)
everything.append(eye_wetness_material)
portrait_basic_material, portrait_basic_shade = MakeMaterial('eve_basic', 'closest_hit_ShadePortraitBasic')
portrait_basic_material_decal = MakeMaterialWithShader(portrait_basic_shade)[0]
MakeDecal(portrait_basic_material_decal)
LogInfo('global setup OK', time.clock() - start, 'seconds')
def MakeSamplerFromMap(texture, name):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromSurface(texture)
sampler.SetNormalizedIndexingMode(True)
optix.SetSampler(name, sampler)
LogInfo('No-Copy Interop for ', name)
everything.append(sampler)
MakeSamplerFromMap(self.oxWorldPosMapUV, 'world_pos_uv_buffer')
MakeSamplerFromMap(self.oxWorldNormalMapUV, 'world_normal_uv_buffer')
MakeSamplerFromMap(self.stretchMap, 'stretchmap_buffer')
useHdrProbe = False
if useHdrProbe:
optix.SetSamplerFromProbe('hdr_probe_sampler', 'c:/depot/optix/data/Japan_subway2_FINAL.hdr')
start = time.clock()
self.skinnedOptix = optix.CreateFromSkinnedModel(model, 72, path + 'triangle72.ptx', 'mesh_intersect', 'mesh_bounds', 64, path + 'triangle64.ptx', 'mesh_intersect', 'mesh_bounds')
optixBatches = self.skinnedOptix[0]
self.TransferBeardParameters(optix)
group = trinity.Tr2OptixGeometryGroup()
groupChildren = []
self.rayCounter = RayCountHelper(self.optix)
self.oit = OitHelper(self.optix)
self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
self.RunSkinningAndTesselation()
start = time.clock()
samplers = SkinRaytracingTools.InteropAllTextures(optix, model, waitForFinish=True)
everything.append(samplers)
backdrop = trinity.TriTexture2DParameter()
backdrop.resourcePath = self.settings['backgroundBitmap']
skinmap = trinity.TriTexture2DParameter()
skinmap.resourcePath = 'res:/Graphics/Character/female/paperdoll/head/head_generic/SkinMap.png'
blue.resMan.Wait()
everything.append(SkinRaytracingTools.InteropTexture('BackgroundEnvMap', backdrop.resource, waitForFinish=True, scope=optix))
everything.append(SkinRaytracingTools.InteropTexture('SkinMap', skinmap.resource, waitForFinish=True, scope=optix))
LogInfo('texture interop OK', time.clock() - start, 'seconds')
splines = self.GenerateBeardGeometry(optix, path, any_hit_shadow)
if splines is not None:
groupChildren.append(splines)
print '*** Raytracing phase ***'
def SetAlphaRef(instance, batchType):
if batchType == 1:
instance.SetFloat4('alphaRef', 0.75, 0, 0, 0)
elif batchType == 2:
instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)
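# batchType indexes [opaque, decal, transparent]: decals alpha-test at 0.75,
# transparent areas at 0.01.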
haveGlasses = False
for batchType in range(len(optixBatches)):
isOpaque = batchType == 0
batches = optixBatches[batchType]
for batch in batches:
if 'furshell' in batch[1].effectFilePath.lower():
continue
instance = trinity.Tr2OptixGeometryInstance()
everything.append(instance)
instance.SetGeometry(batch[0])
r = random.random()
g = random.random()
b = random.random()
instance.SetFloat4('viz_constant_color', r, g, b, 1.0)
fxpath = batch[1].effectFilePath.lower()
if False:
instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
elif 'glassshader' in fxpath:
instance.SetMaterial(glass_material)
if not haveGlasses:
haveGlasses = True
elif 'skinnedavatarbrdfsinglepassskin_single.fx' in fxpath:
if batch[1] in self.scatterFX:
instance.SetMaterial(skin_single_material_scatter)
else:
instance.SetMaterial(skin_single_material if isOpaque else skin_single_material_decal)
SetAlphaRef(instance, batchType)
elif 'skinnedavatarbrdfsinglepassskin_double.fx' in fxpath:
instance.SetMaterial([skin_double_material, skin_double_material_decal, skin_double_material_transparent][batchType])
SetAlphaRef(instance, batchType)
elif 'skinnedavatarbrdflinear.fx' in fxpath:
instance.SetMaterial(avatar_brdf_material if isOpaque else avatar_brdf_material_decal)
elif 'skinnedavatarbrdfdoublelinear.fx' in fxpath:
instance.SetMaterial(avatar_brdf_double_material if isOpaque else avatar_brdf_double_material_decal)
elif 'skinnedavatarhair_detailed.fx' in fxpath:
instance.SetMaterial(avatar_hair_material if isOpaque else avatar_hair_material_decal)
instance.SetFloat4('alphaRef', 0.01, 0, 0, 0)
instance.SetUInt('enableCulling', 0)
elif 'eyeshader.fx' in fxpath:
instance.SetMaterial(eye_material)
elif 'eyewetnessshader.fx' in fxpath:
instance.SetMaterial(eye_wetness_material)
elif 'portraitbasic.fx' in fxpath:
instance.SetMaterial(portrait_basic_material if isOpaque else portrait_basic_material_decal)
else:
instance.SetMaterial(vizualizer if isOpaque else vizualizer_decal)
SkinRaytracingTools.CopyParametersToContext(batch[1], instance)
groupChildren.append(instance)
group.SetChildCount(len(groupChildren))
for x in xrange(len(groupChildren)):
group.SetChild(x, groupChildren[x])
everything.append(group)
group.SetAcceleration('Bvh', 'Bvh')
LogInfo('scene interop OK', time.clock() - start, 'seconds')
start = time.clock()
bufEveLights = SkinRaytracingTools.CreateBufferForLights(scene.lights, useHdrProbe)
optix.SetBuffer('trinity_lights', bufEveLights)
LogInfo('lights interop OK', time.clock() - start, 'seconds')
start = time.clock()
optix.SetGeometryGroup('top_scene', group)
optix.SetGeometryGroup('shadow_casters', group)
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetEntryPointCount(1)
miss = None
if not useHdrProbe:
miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
else:
miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
optix.SetMissProgram(3, miss)
optix.SetFloat3('bg_color', 1.0, 0, 0)
everything.append(miss)
if False:
exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
optix.SetExceptionProgram(0, exception)
everything.append(exception)
optix.SetStackSize(4096)
self.everything = everything
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
LogInfo('general setup OK', time.clock() - start, 'seconds')
optix.ReportObjectCounts()
start = time.clock()
optix.Compile()
LogInfo('compile OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Validate()
LogInfo('validate OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Run(0, 0, 0)
LogInfo('BVH OK', time.clock() - start, 'seconds')
start = time.clock()
self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
rj = trinity.CreateRenderJob('FullOptixRenderer')
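# NOTE: self.outputRT pushed below is not assigned anywhere in this file;
# the render target CUDA writes into appears to have been lost in
# decompilation.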
rj.PushRenderTarget(self.outputRT)
rj.PushDepthStencil(None)
self.AddCallback(FullOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
rj.CopyRtToTexture(outputTexture).name = 'cuda -> outputTexture'
rj.PopDepthStencil()
rj.PopRenderTarget()
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen quad'
rj.RenderEffect(self.blitfx.effect).name = ' blit'
self.capture.CreateRenderSteps(rj, self.blitfx.effect)
rj.steps.append(trinity.TriStepRenderFps())
rj.ScheduleRecurring(insertFront=False)
self.renderJob = rj
LogInfo('final setup OK', time.clock() - start, 'seconds')
model.display = False
self.EnablePaperDollJobs(False)
@staticmethod
def EnablePaperDollJobs(enable):
if False:
for job in trinity.renderJobs.recurring:
if 'paperdollrenderjob' in job.name.lower():
for step in job.steps:
step.enabled = enable
if enable:
trinity.device.tickInterval = 10
else:
trinity.device.tickInterval = 0
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
def SetLensRadius(self, lens_radius):
self.settings['lens_radius'] = lens_radius
self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
def SetLightSize(self, light_size):
self.settings['light_size'] = light_size
self.ApplySettings()
def SetHairShadowsEnabled(self, enabled):
self.settings['HairShadows'] = float(enabled)
self.ApplySettings()
def SetBackgroundIntensity(self, intensity):
self.settings['EnvMapBoost'] = intensity
self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
def GetMemento(self):
return self.settings
def __del__(self):
LogInfo('deleting', self)
if hasattr(self, 'renderJob'):
self.renderJob.UnscheduleRecurring()
self.renderJob = None
del self.raygen
del self.rayCounter
del self.oit
del self.outputBuffer
del self.skinnedOptix
del self.everything
LogInfo('Post-cleanup leak check:')
self.optix.ReportObjectCounts()
self.EnablePaperDollJobs(True)
@staticmethod
def Pause():
if FullOptixRenderer.instance is not None:
FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
@staticmethod
def NotifyUpdate():
if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
memento = FullOptixRenderer.instance.GetMemento()
FullOptixRenderer.instance = None
FullOptixRenderer.instance = FullOptixRenderer(memento=memento)
class ShipOptixRenderer():
__guid__ = 'paperDoll.ShipOptixRenderer'
instance = None
def AddCallback(self, func, name, rj):
cb = trinity.TriStepPythonCB()
weakSelf = weakref.ref(self)
cb.SetCallback(lambda : SkinRaytracingTools.FuncWrapper(weakSelf, func))
cb.name = name
rj.steps.append(cb)
def GetFrameCount(self):
return self.framecount
def SaveScreenshot(self, filename):
self.capture.SaveSurfaceToFile(filename)
def AddRenderPreviewStep(self, renderJob):
renderJob.SetStdRndStates(trinity.RM_FULLSCREEN).name = ' [optix] fullscreen quad'
renderJob.PushDepthStencil(None).name = ' [optix] push depth'
renderJob.RenderEffect(self.blitfx.effect).name = ' [optix] Blit to screenshot'
renderJob.PopDepthStencil().name = ' [optix] pop depth'
@staticmethod
def RaytraceFrame(selfRef):
start = time.time()
VP = SkinRaytracingTools.SetOptixMatrixFromTrinity(selfRef.optix, 'clipToWorld', selfRef.width / float(selfRef.height))
if not SkinRaytracingTools.matEqual(VP, selfRef.previousVP):
selfRef.previousVP = VP
selfRef.outputBuffer.Clear()
selfRef.framecount = 0
pos1 = (0, 0, 0)
pos2 = pos1
dist1 = geo2.Vec3Distance(pos1, trinity.GetViewPosition())
dist2 = geo2.Vec3Distance(pos2, trinity.GetViewPosition())
autodof = min(dist1, dist2)
dof = selfRef.settings.get('lens_focal_distance', autodof)
LogInfo('Auto-depth-of-field is at', autodof, ', actual focal distance is', dof)
selfRef.optix.SetFloat3('depthOfField', dof - trinity.GetFrontClip(), selfRef.settings['lens_radius'], 0)
else:
selfRef.framecount += 1
selfRef.optix.SetUInt('frameIteration', selfRef.framecount)
selfRef.oit.ResetAllocationCount()
selfRef.rayCounter.ResetCount()
time1 = time.time()
selfRef.optix.Run(0, selfRef.width, selfRef.height)
time2 = time.time()
traceTime = time2 - time1
raycount = selfRef.rayCounter.GetCount()
raysec = 0
if traceTime > 0:
raysec = raycount / float(traceTime)
time3 = time.time()
if selfRef.framecount % 32 == 0:
oit = selfRef.oit.GetAllocationCount()
if oit > 0:
print oit, 'oit allocations'
selfRef.blitfx.UpdateFrameCount(selfRef.framecount)
selfRef.outputBuffer.CopyToTexture(selfRef.outputTexture)
stop = time.time()
message = 'time: call %05.3f / trace %05.3f / read %05.3f ms' % (float(time1 - start) * 1000, float(time2 - time1) * 1000, float(stop - time3) * 1000)
message += '// traced %d rays in %05.3f ms / %10d Krays/sec / %d frames' % (raycount,
traceTime * 1000,
raysec / 1000,
selfRef.framecount)
LogInfo(message)
def ConvertCubeMapToSH(self, optix, ptxPath, cubeResPath):
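# Project the nebula cube map onto the first 9 spherical-harmonic
# coefficients (one float4 each) with the cubemapsh.ptx kernel; results land
# in 'sh_buffer', presumably consumed by the ship shaders as ambient light.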
self.shBuffer = trinity.Tr2OptixBuffer()
self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('sh_buffer', self.shBuffer)
self.shBuffer.Clear()
program = trinity.Tr2OptixProgram(ptxPath + 'cubemapsh.ptx', 'kernel')
optix.SetRayGenerationProgram(0, program)
optix.ReportObjectCounts()
cube = trinity.TriTextureCubeParameter()
cube.resourcePath = cubeResPath
cube.name = 'Nebula'
blue.resMan.Wait()
mipmaps, names = SkinRaytracingTools.ConvertCubeToTextures(cube)
for i in range(len(names)):
if i < len(mipmaps):
sampler = trinity.Tr2OptixTextureSampler()
sampler.CreateFromTexture(mipmaps[i])
sampler.SetNormalizedIndexingMode(True)
optix.SetSampler(cube.name + names[i], sampler)
LogInfo('No-Copy Cube Side Interop for ' + cube.name + names[i])
optix.Run(0, cube.resource.width, cube.resource.width)
if False:
names = ['Y00',
'Y1m1',
'Y10',
'Y11',
'Y2m2',
'Y2m1',
'Y20',
'Y21',
'Y22']
self.shBuffer.Map()
ofs = 0
for name in names:
print name, ': (',
print self.shBuffer.GetUserDataF(ofs), ',',
ofs = ofs + 4
print self.shBuffer.GetUserDataF(ofs), ',',
ofs = ofs + 4
print self.shBuffer.GetUserDataF(ofs), ')'
ofs = ofs + 4
self.shBuffer.Unmap()
def CachedCreateMaterial(self, path, effect):
material = self.materialCache.get(effect, None)
if material is not None:
return material
shader = None
if effect in ('tripleglowv3', 'doubleglowv3', 'singleglowv3'):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_' + effect)
elif effect in ('singleheatv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_heat.ptx', 'closest_hit_' + effect)
elif effect in ('tripleglowoilv3',):
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow_oil.ptx', 'closest_hit_' + effect)
elif effect == 'skinned_tripleglowv3':
shader = trinity.Tr2OptixProgram(path + 'v3ship_glow.ptx', 'closest_hit_tripleglowv3')
if shader is None:
return
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(0, shader)
material.SetAnyHit(1, self.any_hit_shadow)
return material
def _DoInit(self, scene = None):
if scene is None:
scene = trinity.device.scene
self.scene = scene
self.previousVP = trinity.TriMatrix()
self.framecount = 1
self.materialCache = {}
self.useOIT = True
if scene is None:
LogWarn('No scene!')
return
bg = trinity.renderContext.GetDefaultBackBuffer()
step = trinity.renderJobs.FindStepByName('SET_SWAPCHAIN_RT')
if step is not None:
bg = step.renderTarget
self.width = self.settings.get('outputWidth', bg.width)
self.height = self.settings.get('outputHeight', bg.height)
self.blitfx = FullScreenBlitter(self.width, self.height)
bloomScale = 4
if False:
self.highpassRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.filteredRT = PD.SkinLightmapRenderer.CreateRenderTarget(self.width / bloomScale, self.height / bloomScale, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
outputTexture = trinity.TriTextureRes(self.width, self.height, 1, trinity.PIXEL_FORMAT.R32G32B32A32_FLOAT)
self.outputTexture = outputTexture
self.capture = CaptureHelper(self.width, self.height)
for steps in trinity.renderJobs.recurring:
if steps.name == 'ShipOptixRenderer':
steps.UnscheduleRecurring()
path = str(blue.paths.ResolvePath('res:/graphics/effect/optix/ship/'))
self.path = path
LogInfo('Getting files from', path)
start = time.clock()
optix = trinity.Tr2Optix()
self.optix = optix
optix.SetInteropDevice()
optix.SetRayTypeCount(4)
optix.SetEntryPointCount(1)
if False:
optix.EnableAllExceptions()
if False:
optix.SetPrintEnabled(True)
optix.SetPrintBufferSize(16384)
optix.SetFloat('scene_epsilon', 0.01)
optix.SetUInt('frameIteration', 0)
nebula = PD.FindResourceByName(scene.backgroundEffect, 'NebulaMap') if scene.backgroundEffect is not None else None
if nebula is not None:
LogInfo('Converting to SH ', nebula.resourcePath)
self.ConvertCubeMapToSH(optix, path, nebula.resourcePath)
else:
self.shBuffer = trinity.Tr2OptixBuffer()
self.shBuffer.CreateFloat4(9, 1, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('sh_buffer', self.shBuffer)
self.shBuffer.Clear()
self.outputBuffer = trinity.Tr2OptixBuffer()
self.outputBuffer.CreateFloat4(self.width, self.height, trinity.OPTIX_BUFFER_INPUT_OUTPUT)
optix.SetBuffer('output_buffer', self.outputBuffer)
self.ApplySettings()
everything = []
mainRay = 0
shadowRay = 1
bounceRay = 3
# NOTE: these two helpers arrive truncated from decompilation and are unused
# in this class; reconstructed minimally after the FullOptixRenderer versions.
def MakeMaterialWithShader(shader):
material = trinity.Tr2OptixMaterial()
material.SetClosestHit(mainRay, shader)
everything.append(material)
return (material, shader)
def MakeMaterial(ptxFile, shaderName):
shader = trinity.Tr2OptixProgram(path + ptxFile + '.ptx', shaderName)
everything.append(shader)
return MakeMaterialWithShader(shader)
LogInfo('global setup OK', time.clock() - start, 'seconds')
useHdrProbe = False
start = time.clock()
self.rayCounter = RayCountHelper(self.optix)
self.oit = OitHelper(self.optix)
self.raygen = trinity.Tr2OptixProgram(path + 'raygen.ptx', 'ray_request')
shader = trinity.Tr2OptixProgram(path + 'vizualizer.ptx', 'closest_hit_VizGreen')
viz_material = trinity.Tr2OptixMaterial()
viz_material.SetClosestHit(0, shader)
everything.append(viz_material)
if False:
any_hit_shadow = trinity.Tr2OptixProgram(path + 'shadow.ptx', 'any_hit_shadow')
viz_material.SetAnyHit(1, any_hit_shadow)
self.any_hit_shadow = any_hit_shadow
else:
self.any_hit_shadow = None
start = time.clock()
nameTranslation = {'GlowNormalSpecularMap': 'NormalMap'}
def GroupByVertexBuffer(optixBatches):
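# Bucket batches by shared vertex buffer so each bucket can become one
# geometry group with its own BVH acceleration structure.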
output = []
for batchType in range(len(optixBatches)):
batches = optixBatches[batchType]
vbDict = {}
for batch in batches:
vb = batch[2]
list = vbDict.get(vb, None)
if list is not None:
list.append(batch)
else:
vbDict[vb] = [batch]
list = []
for vb in vbDict.iterkeys():
list.append(vbDict[vb])
output.append(list)
return output
cache = {}
programs = {'skinned_tripleglowv3_48': 'triangle48',
'singlev3_48': 'triangle48',
'singleheatv3_48': 'triangle48',
'tripleglowv3_40': 'triangle40',
'singleheatv3_40': 'triangle40',
'singlefresnelreflectionwithglow_56': 'triangle56',
'doublefresnelreflectionwithglow_56': 'triangle56',
'tripleglowoilv3_80': 'triangle80'}
if True:  # NOTE: guard was 'if False' in the decompile, but nullintersect/nullbounds are referenced below when an effect fails to map; enabled to avoid a NameError
nullintersect = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'intersect')
nullbounds = trinity.Tr2OptixProgram(path + 'nullgeometry.ptx', 'bounds')
everything.append(nullintersect)
everything.append(nullbounds)
mylogOK = set({})
mylogFail = set({})
linearNames = set({})
linearNames.add('MaterialDiffuseColor')
linearNames.add('MaterialReflectionColor')
linearNames.add('MaskDiffuseColor')
linearNames.add('MaskReflectionColor')
linearNames.add('SubMaskDiffuseColor')
linearNames.add('SubMaskReflectionColor')
linearNames.add('GlowColor')
topScene = trinity.Tr2OptixGroup()
interopSamplerCache = {}
for dynamic in scene.objects:
if dynamic.__typename__ not in ('EveShip2', 'EveStation2'):
continue
model = dynamic
if model.highDetailMesh is None or model.highDetailMesh.object is None:
LogWarn('ship has no high detail meshes')
continue
skinnedOptix = optix.CreateFromEveSpaceObject2(model, 0, '', '', '')
everything.append(skinnedOptix)
optixBatches = skinnedOptix[0]
self.objectsToRefresh[model] = skinnedOptix
sorted = GroupByVertexBuffer(optixBatches)
groups = []
for batchType in range(len(optixBatches)):
isOpaque = batchType == 0
vbBatches = sorted[batchType]
for batches in vbBatches:
groupChildren = []
for batch in batches:
effect = batch[1].effectFilePath.lower()
effect = effect[effect.rfind('/') + 1:]
effect = effect[:effect.rfind('.fx')]
ptx = programs.get(effect + '_' + str(batch[8]), '')
if ptx == '':
mylogFail.add(effect)
batch[0].SetIntersectProgram(nullintersect)
batch[0].SetBoundsProgram(nullbounds)
continue
mylogOK.add(effect)
intersect, bounds = cache.get(ptx, (None, None))
if intersect is None:
intersect = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'intersect')
bounds = trinity.Tr2OptixProgram(path + ptx + '.ptx', 'bounds')
cache[ptx] = (intersect, bounds)
batch[0].SetIntersectProgram(intersect)
batch[0].SetBoundsProgram(bounds)
batchGeometryInstance = trinity.Tr2OptixGeometryInstance()
everything.append(batchGeometryInstance)
batchGeometryInstance.SetGeometry(batch[0])
if True:
material = self.CachedCreateMaterial(path, effect)
if material is None:
material = viz_material
else:
material = viz_material
batchGeometryInstance.SetMaterial(material)
SkinRaytracingTools.CopyParametersToContext(batch[1], batchGeometryInstance, linearNames)
groupChildren.append(batchGeometryInstance)
samplers = SkinRaytracingTools.InteropAllTexturesFromEffect(optix, batch[1], waitForFinish=True, nameTranslation=nameTranslation, scope=batchGeometryInstance, cache=interopSamplerCache)
everything.append(samplers)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(len(groupChildren))
for x in xrange(len(groupChildren)):
group.SetChild(x, groupChildren[x])
group.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(group)
groups.append(group)
everything.append(cache)
baseOffset = topScene.GetChildCount()
topScene.SetChildCount(baseOffset + len(groups))
for x in xrange(len(groups)):
topScene.SetChild(baseOffset + x, groups[x])
everything.append(groups)
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
everything.append(sphereGeometry)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(viz_material)
sphereInstance.SetFloat4('pos_r', 0, 0, 0, 100)
sphereInstance.SetFloat4('color_watt', 1, 0, 0, 1)
everything.append(sphereInstance)
group = trinity.Tr2OptixGeometryGroup()
group.SetChildCount(1)
group.SetChild(0, sphereInstance)
group.SetAcceleration('Bvh', 'Bvh')
topScene.SetChildCount(topScene.GetChildCount() + 1)
topScene.SetChild(topScene.GetChildCount() - 1, group)
everything.append(topScene)
topScene.SetAcceleration('Bvh', 'Bvh')
self.objectsToMarkDirty.append(topScene)
optix.SetGroup('top_scene', topScene)
optix.SetGroup('shadow_casters', topScene)
if len(mylogOK) > 0:
LogInfo('Converted successfully:', str(mylogOK))
else:
LogWarn('No effects converted successfully!')
if len(mylogFail) > 0:
LogWarn('Failed to convert:', str(mylogFail))
if type(scene) == trinity.EveSpaceScene:
c = SkinRaytracingTools.SafeLinearize(scene.sunDiffuseColor)
optix.SetFloat4('SunDiffuseColor', c[0], c[1], c[2], c[3])
c = scene.sunDirection
optix.SetFloat4('SunDirWorld', -c[0], -c[1], -c[2], 0)
c = SkinRaytracingTools.SafeLinearize(scene.ambientColor)
optix.SetFloat4('SceneAmbientColor', c[0], c[1], c[2], c[3])
c = SkinRaytracingTools.SafeLinearize(scene.fogColor)
optix.SetFloat4('SceneFogColor', c[0], c[1], c[2], c[3])
LogInfo('scene interop OK', time.clock() - start, 'seconds')
start = time.clock()
light = trinity.Tr2InteriorLightSource()
if True:
wattage = 2000000
light.color = (1,
1,
1,
wattage)
light.radius = 50
light.position = (200, 500, -300)
else:
wattage = 10000000
light.color = (1, 1, 1, wattage)
light.radius = 1000
light.position = (0, 0, 0)
bufEveLights = SkinRaytracingTools.CreateBufferForLights([], useHdrProbe, preserveAlpha=True)
optix.SetBuffer('trinity_lights', bufEveLights)
LogInfo('lights interop OK', time.clock() - start, 'seconds')
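# Disabled debug path: would visualize the light itself as an emissive sphere.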
if False:
sphereGeometry = trinity.Tr2OptixGeometry()
sphereGeometry.InitializeFromProgram(path + 'sphere_program.ptx', 'intersect', 'bounds')
sphereGeometry.SetPrimitiveCount(1)
sphereMaterial = trinity.Tr2OptixMaterial()
sphereShader = trinity.Tr2OptixProgram(path + 'sphere_program.ptx', 'closest_hit_radiance')
sphereMaterial.SetClosestHit(0, sphereShader)
sphereInstance = trinity.Tr2OptixGeometryInstance()
sphereInstance.SetGeometry(sphereGeometry)
sphereInstance.SetMaterial(sphereMaterial)
sphereInstance.SetFloat4('pos_r', light.position[0], light.position[1], light.position[2], light.radius)
sphereInstance.SetFloat4('color_watt', light.color[0], light.color[1], light.color[2], light.color[3])
n = topScene.GetChildCount()
topScene.SetChildCount(n + 1)
sphereGroup = trinity.Tr2OptixGeometryGroup()
sphereGroup.SetChildCount(1)
sphereGroup.SetChild(0, sphereInstance)
sphereGroup.SetAcceleration('Bvh', 'Bvh')
topScene.SetChild(n, sphereGroup)
start = time.clock()
optix.SetRayGenerationProgram(0, self.raygen)
optix.SetEntryPointCount(1)
miss = None
if not useHdrProbe:
miss = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'miss')
else:
miss = trinity.Tr2OptixProgram(path + 'eve_miss_probe.ptx', 'miss')
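# Bind the miss program to ray type 3; the ray-type layout is assumed to be defined by the PTX shaders.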
optix.SetMissProgram(3, miss)
optix.SetFloat3('bg_color', 1.0, 0, 0)
everything.append(miss)
if False:
exception = trinity.Tr2OptixProgram(path + 'eve_miss.ptx', 'exception')
optix.SetExceptionProgram(0, exception)
everything.append(exception)
optix.SetStackSize(4096)
self.everything = everything
SkinRaytracingTools.SetOptixMatrixFromTrinity(optix, 'clipToWorld', self.width / float(self.height))
LogInfo('general setup OK', time.clock() - start, 'seconds')
optix.ReportObjectCounts()
start = time.clock()
optix.Compile()
LogInfo('compile OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Validate()
LogInfo('validate OK', time.clock() - start, 'seconds')
start = time.clock()
optix.Run(0, 0, 0)
LogInfo('BVH OK', time.clock() - start, 'seconds')
start = time.clock()
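# Post-processing toggle: the disabled branch would route the output through a highpass/blur bloom chain; the active branch blits the raytraced output directly.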
if False:
self.blitfx.SetTexture(outputTexture, self.highpassRT, self.filteredRT)
else:
self.blitfx.SetTexture(outputTexture, outputTexture, outputTexture)
rj = trinity.CreateRenderJob('ShipOptixRenderer')
self.AddCallback(ShipOptixRenderer.RaytraceFrame, 'Raytrace Frame', rj)
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
if False:
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.highpassEffect).name = ' high pass'
rj.SetRenderTarget(self.filteredRT.wrappedRenderTarget).name = ' SetRT filteredRT'
rj.RenderEffect(self.blitfx.gaussianHorizEffect).name = ' horizontal blur'
rj.SetRenderTarget(self.highpassRT.wrappedRenderTarget).name = ' SetRT highpassRT'
rj.RenderEffect(self.blitfx.gaussianVertEffect).name = ' vertical blur'
rj.SetStdRndStates(trinity.RM_FULLSCREEN).name = 'fullscreen state'
rj.RenderEffect(self.blitfx.effect).name = ' blit'
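# If the main view panel job exists, run our job just before its final-RT step; otherwise schedule it as a standalone recurring job.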
tp2 = None
for job in trinity.renderJobs.recurring:
if job.name == 'TrinityPanel:View1':
tp2 = job
if tp2 is None:
rj.ScheduleRecurring(insertFront=False)
else:
final = None
for step in tp2.steps:
if step.name == 'SET_FINAL_RT':
final = step
break
if final is not None:
tp2.steps.insert(tp2.steps.index(final), trinity.TriStepRunJob(rj))
else:
tp2.steps.append(trinity.TriStepRunJob(rj))
self.renderJob = rj
LogInfo('final setup OK', time.clock() - start, 'seconds')
FullOptixRenderer.EnablePaperDollJobs(False)
def ApplySettings(self):
self.optix.SetFloat('light_size', self.settings['light_size'])
self.optix.SetFloat3('depthOfField', 1.0, self.settings['lens_radius'], 0)
self.optix.SetFloat('HairShadows', self.settings['HairShadows'])
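# Dividing by pi presumably normalizes the boost for a Lambertian environment term (assumption; the convention lives in the shader).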
self.optix.SetFloat('EnvMapBoost', self.settings['EnvMapBoost'] / 3.1415927)
self.previousVP.Identity()
def SetLensRadius(self, lens_radius):
self.settings['lens_radius'] = lens_radius
self.ApplySettings()
def SetLensFocalDistance(self, lens_focal_distance):
if lens_focal_distance <= 0:
self.settings.pop('lens_focal_distance', 0)
else:
self.settings['lens_focal_distance'] = lens_focal_distance
self.ApplySettings()
def SetLightSize(self, light_size):
self.settings['light_size'] = light_size
self.ApplySettings()
def SetHairShadowsEnabled(self, enabled):
self.settings['HairShadows'] = float(enabled)
self.ApplySettings()
def SetBackgroundIntensity(self, intensity):
self.settings['EnvMapBoost'] = intensity
self.ApplySettings()
def __init__(self, scene = None, backgroundBitmap = None, memento = None, beardLength = (0.01, 0.01), beardGravity = 0.0005, outputWidth = None, outputHeight = None, asyncSetup = True, listenForUpdate = True):
LogInfo('init', self)
blue.motherLode.maxMemUsage = 0
blue.resMan.ClearAllCachedObjects()
self.framecount = 0
self.listenForUpdate = listenForUpdate
self.everything = None
self.objectsToRefresh = {}
self.objectsToMarkDirty = []
if memento is not None:
self.settings = memento
else:
self.settings = {}
self.settings['light_size'] = 0.125
self.settings['lens_radius'] = 0.001
self.settings['HairShadows'] = 1.0
self.settings['EnvMapBoost'] = 1.0
self.settings['backgroundBitmap'] = backgroundBitmap if backgroundBitmap is not None else 'res:/texture/global/red_blue_ramp.dds'
self.settings['beardLength'] = beardLength
self.settings['beardGravity'] = beardGravity
if outputWidth is not None:
self.settings['outputWidth'] = outputWidth
if outputHeight is not None:
self.settings['outputHeight'] = outputHeight
if asyncSetup:
uthread.new(self._DoInit, scene=scene)
else:
self._DoInit(scene=scene)
def GetMemento(self):
return self.settings
def __del__(self):
LogInfo('deleting', self)
if hasattr(self, 'renderJob'):
self.renderJob.UnscheduleRecurring()
self.renderJob = None
del self.any_hit_shadow
del self.raygen
del self.rayCounter
del self.oit
del self.shBuffer
del self.outputBuffer
del self.everything
del self.objectsToRefresh
del self.objectsToMarkDirty
self.optix.ClearObjects()
LogInfo('Post-cleanup leak check:')
self.optix.ReportObjectCounts()
FullOptixRenderer.EnablePaperDollJobs(True)
def RefreshMatrices(self):
for ship, optixList in self.objectsToRefresh.iteritems():
self.optix.RefreshMatrices(ship, optixList)
for dirty in self.objectsToMarkDirty:
dirty.MarkDirty()
self.ApplySettings()
LogInfo('Refreshed')
@staticmethod
def Pause():
if FullOptixRenderer.instance is not None:
FullOptixRenderer.instance.renderJob.UnscheduleRecurring()
@staticmethod
def NotifyUpdate():
if FullOptixRenderer.instance is not None and FullOptixRenderer.instance.listenForUpdate:
LogInfo('NotifyUpdate, restarting', FullOptixRenderer.instance)
memento = FullOptixRenderer.instance.GetMemento()
FullOptixRenderer.instance = None
FullOptixRenderer.instance = FullOptixRenderer(memento=memento)
'''
# AWS::Chatbot Construct Library
AWS Chatbot is an AWS service that enables DevOps and software development teams to use Slack chat rooms to monitor and respond to operational events in their AWS Cloud. AWS Chatbot processes AWS service notifications from Amazon Simple Notification Service (Amazon SNS), and forwards them to Slack chat rooms so teams can analyze and act on them immediately, regardless of location.
This module is part of the [AWS Cloud Development Kit](https://github.com/aws/aws-cdk) project.
```python
import aws_cdk.aws_chatbot as chatbot
import aws_cdk.aws_sns as sns
import aws_cdk.aws_iam as iam
slack_channel = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
slack_channel_configuration_name="YOUR_CHANNEL_NAME",
slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
slack_channel_id="YOUR_SLACK_CHANNEL_ID"
)
slack_channel.add_to_role_policy(iam.PolicyStatement(
effect=iam.Effect.ALLOW,
actions=["s3:GetObject"
],
resources=["arn:aws:s3:::abc/xyz/123.txt"]
))
slack_channel.add_notification_topic(sns.Topic(self, "MyTopic"))
```
## Log Group
Slack channel configuration automatically creates a log group with the name `/aws/chatbot/<configuration-name>` in `us-east-1` upon first execution, with
log data set to never expire.
The `logRetention` property can be used to set a different expiration period. A log group will be created if it does not already exist.
If the log group already exists, its expiration will be configured to the value specified in this construct (never expire, by default).
By default, CDK uses the AWS SDK retry options when interacting with the log group. The `logRetentionRetryOptions` property
allows you to customize the maximum number of retries and base backoff duration.
*Note* that, if `logRetention` is set, a [CloudFormation custom
resource](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cfn-customresource.html) is added
to the stack that pre-creates the log group as part of the stack deployment, if it doesn't already exist, and sets the
correct log retention period (never expire, by default).
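For example, a minimal sketch (assuming the standard `RetentionDays` enum from `aws_logs`) that keeps Chatbot logs for one month:

```python
import aws_cdk.aws_chatbot as chatbot
import aws_cdk.aws_logs as logs

slack_channel = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
    slack_channel_configuration_name="YOUR_CHANNEL_NAME",
    slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
    slack_channel_id="YOUR_SLACK_CHANNEL_ID",
    # keep Chatbot logs for one month instead of never expiring
    log_retention=logs.RetentionDays.ONE_MONTH
)
```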
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from typeguard import check_type
from .._jsii import *
import constructs
from .. import (
CfnResource as _CfnResource_9df397a6,
Duration as _Duration_4839e8c3,
IInspectable as _IInspectable_c2943556,
IResolvable as _IResolvable_da3f097b,
IResource as _IResource_c80c4260,
Resource as _Resource_45bc6135,
TreeInspector as _TreeInspector_488e0dd5,
)
from ..aws_cloudwatch import (
Metric as _Metric_e396a4dc,
MetricOptions as _MetricOptions_1788b62f,
Unit as _Unit_61bc6f70,
)
from ..aws_codestarnotifications import (
INotificationRuleTarget as _INotificationRuleTarget_faa3b79b,
NotificationRuleTargetConfig as _NotificationRuleTargetConfig_ea27e095,
)
from ..aws_iam import (
IGrantable as _IGrantable_71c4f5de,
IPrincipal as _IPrincipal_539bb2fd,
IRole as _IRole_235f5d8e,
PolicyStatement as _PolicyStatement_0fe33853,
)
from ..aws_logs import (
LogRetentionRetryOptions as _LogRetentionRetryOptions_62d80a14,
RetentionDays as _RetentionDays_070f99f0,
)
from ..aws_sns import ITopic as _ITopic_9eca4852
@jsii.implements(_IInspectable_c2943556)
class CfnSlackChannelConfiguration(
_CfnResource_9df397a6,
metaclass=jsii.JSIIMeta,
jsii_type="aws-cdk-lib.aws_chatbot.CfnSlackChannelConfiguration",
):
'''A CloudFormation ``AWS::Chatbot::SlackChannelConfiguration``.
The ``AWS::Chatbot::SlackChannelConfiguration`` resource configures a Slack channel to allow users to use AWS Chatbot with AWS CloudFormation templates.
This resource requires some setup to be done in the AWS Chatbot console. To provide the required Slack workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console, then copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide* .
:cloudformationResource: AWS::Chatbot::SlackChannelConfiguration
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html
:exampleMetadata: fixture=_generated
Example::
# The code below shows an example of how to instantiate this type.
# The values are placeholders you should change.
from aws_cdk import aws_chatbot as chatbot
cfn_slack_channel_configuration = chatbot.CfnSlackChannelConfiguration(self, "MyCfnSlackChannelConfiguration",
configuration_name="configurationName",
iam_role_arn="iamRoleArn",
slack_channel_id="slackChannelId",
slack_workspace_id="slackWorkspaceId",
# the properties below are optional
guardrail_policies=["guardrailPolicies"],
logging_level="loggingLevel",
sns_topic_arns=["snsTopicArns"],
user_role_required=False
)
'''
def __init__(
self,
scope: constructs.Construct,
id: builtins.str,
*,
configuration_name: builtins.str,
iam_role_arn: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
guardrail_policies: typing.Optional[typing.Sequence[builtins.str]] = None,
logging_level: typing.Optional[builtins.str] = None,
sns_topic_arns: typing.Optional[typing.Sequence[builtins.str]] = None,
user_role_required: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]] = None,
) -> None:
'''Create a new ``AWS::Chatbot::SlackChannelConfiguration``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param configuration_name: The name of the configuration.
:param iam_role_arn: The ARN of the IAM role that defines the permissions for AWS Chatbot . This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_ .
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ`` .
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot . To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide* .
:param guardrail_policies: The list of IAM policy ARNs that are applied as channel guardrails. The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Logging levels include ``ERROR`` , ``INFO`` , or ``NONE`` .
:param sns_topic_arns: The ARNs of the SNS topics that deliver notifications to AWS Chatbot .
:param user_role_required: Enables use of a user role requirement in your chat configuration.
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration.__init__)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
props = CfnSlackChannelConfigurationProps(
configuration_name=configuration_name,
iam_role_arn=iam_role_arn,
slack_channel_id=slack_channel_id,
slack_workspace_id=slack_workspace_id,
guardrail_policies=guardrail_policies,
logging_level=logging_level,
sns_topic_arns=sns_topic_arns,
user_role_required=user_role_required,
)
jsii.create(self.__class__, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: _TreeInspector_488e0dd5) -> None:
'''Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration.inspect)
check_type(argname="argument inspector", value=inspector, expected_type=type_hints["inspector"])
return typing.cast(None, jsii.invoke(self, "inspect", [inspector]))
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfiguration._render_properties)
check_type(argname="argument props", value=props, expected_type=type_hints["props"])
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The CloudFormation resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> builtins.str:
'''
:cloudformationAttribute: Arn
'''
return typing.cast(builtins.str, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "cfnProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="configurationName")
def configuration_name(self) -> builtins.str:
'''The name of the configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-configurationname
'''
return typing.cast(builtins.str, jsii.get(self, "configurationName"))
@configuration_name.setter
def configuration_name(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "configuration_name").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "configurationName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="iamRoleArn")
def iam_role_arn(self) -> builtins.str:
'''The ARN of the IAM role that defines the permissions for AWS Chatbot .
This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_ .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-iamrolearn
'''
return typing.cast(builtins.str, jsii.get(self, "iamRoleArn"))
@iam_role_arn.setter
def iam_role_arn(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "iam_role_arn").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "iamRoleArn", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelId")
def slack_channel_id(self) -> builtins.str:
'''The ID of the Slack channel.
To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ`` .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackchannelid
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelId"))
@slack_channel_id.setter
def slack_channel_id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "slack_channel_id").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "slackChannelId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackWorkspaceId")
def slack_workspace_id(self) -> builtins.str:
'''The ID of the Slack workspace authorized with AWS Chatbot .
To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide* .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackworkspaceid
'''
return typing.cast(builtins.str, jsii.get(self, "slackWorkspaceId"))
@slack_workspace_id.setter
def slack_workspace_id(self, value: builtins.str) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "slack_workspace_id").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "slackWorkspaceId", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="guardrailPolicies")
def guardrail_policies(self) -> typing.Optional[typing.List[builtins.str]]:
'''The list of IAM policy ARNs that are applied as channel guardrails.
The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-guardrailpolicies
'''
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "guardrailPolicies"))
@guardrail_policies.setter
def guardrail_policies(
self,
value: typing.Optional[typing.List[builtins.str]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "guardrail_policies").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "guardrailPolicies", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="loggingLevel")
def logging_level(self) -> typing.Optional[builtins.str]:
'''Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs.
Logging levels include ``ERROR`` , ``INFO`` , or ``NONE`` .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-logginglevel
'''
return typing.cast(typing.Optional[builtins.str], jsii.get(self, "loggingLevel"))
@logging_level.setter
def logging_level(self, value: typing.Optional[builtins.str]) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "logging_level").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "loggingLevel", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="snsTopicArns")
def sns_topic_arns(self) -> typing.Optional[typing.List[builtins.str]]:
'''The ARNs of the SNS topics that deliver notifications to AWS Chatbot .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-snstopicarns
'''
return typing.cast(typing.Optional[typing.List[builtins.str]], jsii.get(self, "snsTopicArns"))
@sns_topic_arns.setter
def sns_topic_arns(self, value: typing.Optional[typing.List[builtins.str]]) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "sns_topic_arns").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "snsTopicArns", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="userRoleRequired")
def user_role_required(
self,
) -> typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]]:
'''Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-userrolerequired
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]], jsii.get(self, "userRoleRequired"))
@user_role_required.setter
def user_role_required(
self,
value: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]],
) -> None:
if __debug__:
type_hints = typing.get_type_hints(getattr(CfnSlackChannelConfiguration, "user_role_required").fset)
check_type(argname="argument value", value=value, expected_type=type_hints["value"])
jsii.set(self, "userRoleRequired", value)
@jsii.data_type(
jsii_type="aws-cdk-lib.aws_chatbot.CfnSlackChannelConfigurationProps",
jsii_struct_bases=[],
name_mapping={
"configuration_name": "configurationName",
"iam_role_arn": "iamRoleArn",
"slack_channel_id": "slackChannelId",
"slack_workspace_id": "slackWorkspaceId",
"guardrail_policies": "guardrailPolicies",
"logging_level": "loggingLevel",
"sns_topic_arns": "snsTopicArns",
"user_role_required": "userRoleRequired",
},
)
class CfnSlackChannelConfigurationProps:
def __init__(
self,
*,
configuration_name: builtins.str,
iam_role_arn: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
guardrail_policies: typing.Optional[typing.Sequence[builtins.str]] = None,
logging_level: typing.Optional[builtins.str] = None,
sns_topic_arns: typing.Optional[typing.Sequence[builtins.str]] = None,
user_role_required: typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]] = None,
) -> None:
'''Properties for defining a ``CfnSlackChannelConfiguration``.
:param configuration_name: The name of the configuration.
:param iam_role_arn: The ARN of the IAM role that defines the permissions for AWS Chatbot . This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_ .
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ`` .
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot . To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide* .
:param guardrail_policies: The list of IAM policy ARNs that are applied as channel guardrails. The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Logging levels include ``ERROR`` , ``INFO`` , or ``NONE`` .
:param sns_topic_arns: The ARNs of the SNS topics that deliver notifications to AWS Chatbot .
:param user_role_required: Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html
:exampleMetadata: fixture=_generated
Example::
# The code below shows an example of how to instantiate this type.
# The values are placeholders you should change.
from aws_cdk import aws_chatbot as chatbot
cfn_slack_channel_configuration_props = chatbot.CfnSlackChannelConfigurationProps(
configuration_name="configurationName",
iam_role_arn="iamRoleArn",
slack_channel_id="slackChannelId",
slack_workspace_id="slackWorkspaceId",
# the properties below are optional
guardrail_policies=["guardrailPolicies"],
logging_level="loggingLevel",
sns_topic_arns=["snsTopicArns"],
user_role_required=False
)
'''
if __debug__:
type_hints = typing.get_type_hints(CfnSlackChannelConfigurationProps.__init__)
check_type(argname="argument configuration_name", value=configuration_name, expected_type=type_hints["configuration_name"])
check_type(argname="argument iam_role_arn", value=iam_role_arn, expected_type=type_hints["iam_role_arn"])
check_type(argname="argument slack_channel_id", value=slack_channel_id, expected_type=type_hints["slack_channel_id"])
check_type(argname="argument slack_workspace_id", value=slack_workspace_id, expected_type=type_hints["slack_workspace_id"])
check_type(argname="argument guardrail_policies", value=guardrail_policies, expected_type=type_hints["guardrail_policies"])
check_type(argname="argument logging_level", value=logging_level, expected_type=type_hints["logging_level"])
check_type(argname="argument sns_topic_arns", value=sns_topic_arns, expected_type=type_hints["sns_topic_arns"])
check_type(argname="argument user_role_required", value=user_role_required, expected_type=type_hints["user_role_required"])
self._values: typing.Dict[str, typing.Any] = {
"configuration_name": configuration_name,
"iam_role_arn": iam_role_arn,
"slack_channel_id": slack_channel_id,
"slack_workspace_id": slack_workspace_id,
}
if guardrail_policies is not None:
self._values["guardrail_policies"] = guardrail_policies
if logging_level is not None:
self._values["logging_level"] = logging_level
if sns_topic_arns is not None:
self._values["sns_topic_arns"] = sns_topic_arns
if user_role_required is not None:
self._values["user_role_required"] = user_role_required
@builtins.property
def configuration_name(self) -> builtins.str:
'''The name of the configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-configurationname
'''
result = self._values.get("configuration_name")
assert result is not None, "Required property 'configuration_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def iam_role_arn(self) -> builtins.str:
'''The ARN of the IAM role that defines the permissions for AWS Chatbot .
This is a user-defined role that AWS Chatbot will assume. This is not the service-linked role. For more information, see `IAM Policies for AWS Chatbot <https://docs.aws.amazon.com/chatbot/latest/adminguide/chatbot-iam-policies.html>`_ .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-iamrolearn
'''
result = self._values.get("iam_role_arn")
assert result is not None, "Required property 'iam_role_arn' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_channel_id(self) -> builtins.str:
'''The ID of the Slack channel.
To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ``ABCBBLZZZ`` .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackchannelid
'''
result = self._values.get("slack_channel_id")
assert result is not None, "Required property 'slack_channel_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_workspace_id(self) -> builtins.str:
'''The ID of the Slack workspace authorized with AWS Chatbot .
To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in `Setting Up AWS Chatbot with Slack <https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro>`_ in the *AWS Chatbot User Guide* .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-slackworkspaceid
'''
result = self._values.get("slack_workspace_id")
assert result is not None, "Required property 'slack_workspace_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def guardrail_policies(self) -> typing.Optional[typing.List[builtins.str]]:
'''The list of IAM policy ARNs that are applied as channel guardrails.
The AWS managed 'AdministratorAccess' policy is applied as a default if this is not set.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-guardrailpolicies
'''
result = self._values.get("guardrail_policies")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def logging_level(self) -> typing.Optional[builtins.str]:
'''Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs.
Logging levels include ``ERROR`` , ``INFO`` , or ``NONE`` .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-logginglevel
'''
result = self._values.get("logging_level")
return typing.cast(typing.Optional[builtins.str], result)
@builtins.property
def sns_topic_arns(self) -> typing.Optional[typing.List[builtins.str]]:
'''The ARNs of the SNS topics that deliver notifications to AWS Chatbot .
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-snstopicarns
'''
result = self._values.get("sns_topic_arns")
return typing.cast(typing.Optional[typing.List[builtins.str]], result)
@builtins.property
def user_role_required(
self,
) -> typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]]:
'''Enables use of a user role requirement in your chat configuration.
:link: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-chatbot-slackchannelconfiguration.html#cfn-chatbot-slackchannelconfiguration-userrolerequired
'''
result = self._values.get("user_role_required")
return typing.cast(typing.Optional[typing.Union[builtins.bool, _IResolvable_da3f097b]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CfnSlackChannelConfigurationProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.interface(jsii_type="aws-cdk-lib.aws_chatbot.ISlackChannelConfiguration")
class ISlackChannelConfiguration(
_IResource_c80c4260,
_IGrantable_71c4f5de,
_INotificationRuleTarget_faa3b79b,
typing_extensions.Protocol,
):
'''Represents a Slack channel configuration.'''
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationArn")
def slack_channel_configuration_arn(self) -> builtins.str:
'''The ARN of the Slack channel configuration, in the form of arn:aws:chatbot:{region}:{account}:chat-configuration/slack-channel/{slackChannelName}.
:attribute: true
'''
...
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationName")
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.
:attribute: true
'''
...
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.
:default: - A role will be created.
:attribute: true
'''
...
@jsii.member(jsii_name="addToRolePolicy")
def add_to_role_policy(self, statement: _PolicyStatement_0fe33853) -> None:
'''Adds a statement to the IAM role.
:param statement: -
'''
...
@jsii.member(jsii_name="metric")
def metric(
self,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for this SlackChannelConfiguration.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
'''
...
class _ISlackChannelConfigurationProxy(
jsii.proxy_for(_IResource_c80c4260), # type: ignore[misc]
jsii.proxy_for(_IGrantable_71c4f5de), # type: ignore[misc]
jsii.proxy_for(_INotificationRuleTarget_faa3b79b), # type: ignore[misc]
):
'''Represents a Slack channel configuration.'''
__jsii_type__: typing.ClassVar[str] = "aws-cdk-lib.aws_chatbot.ISlackChannelConfiguration"
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationArn")
def slack_channel_configuration_arn(self) -> builtins.str:
'''The ARN of the Slack channel configuration, in the form of arn:aws:chatbot:{region}:{account}:chat-configuration/slack-channel/{slackChannelName}.
:attribute: true
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationName")
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.
:attribute: true
'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.
:default: - A role will be created.
:attribute: true
'''
return typing.cast(typing.Optional[_IRole_235f5d8e], jsii.get(self, "role"))
@jsii.member(jsii_name="addToRolePolicy")
def add_to_role_policy(self, statement: _PolicyStatement_0fe33853) -> None:
'''Adds a statement to the IAM role.
:param statement: -
'''
if __debug__:
type_hints = typing.get_type_hints(ISlackChannelConfiguration.add_to_role_policy)
check_type(argname="argument statement", value=statement, expected_type=type_hints["statement"])
return typing.cast(None, jsii.invoke(self, "addToRolePolicy", [statement]))
@jsii.member(jsii_name="metric")
def metric(
self,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for this SlackChannelConfiguration.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount | "n" - "pNN.NN" Default: Average
:param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all matric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
'''
if __debug__:
type_hints = typing.get_type_hints(ISlackChannelConfiguration.metric)
check_type(argname="argument metric_name", value=metric_name, expected_type=type_hints["metric_name"])
props = _MetricOptions_1788b62f(
account=account,
color=color,
dimensions_map=dimensions_map,
label=label,
period=period,
region=region,
statistic=statistic,
unit=unit,
)
return typing.cast(_Metric_e396a4dc, jsii.invoke(self, "metric", [metric_name, props]))
# Adding a "__jsii_proxy_class__(): typing.Type" function to the interface
typing.cast(typing.Any, ISlackChannelConfiguration).__jsii_proxy_class__ = lambda : _ISlackChannelConfigurationProxy
@jsii.enum(jsii_type="aws-cdk-lib.aws_chatbot.LoggingLevel")
class LoggingLevel(enum.Enum):
'''Logging levels include ERROR, INFO, or NONE.'''
ERROR = "ERROR"
'''ERROR.'''
INFO = "INFO"
'''INFO.'''
NONE = "NONE"
'''NONE.'''
@jsii.implements(ISlackChannelConfiguration)
class SlackChannelConfiguration(
_Resource_45bc6135,
metaclass=jsii.JSIIMeta,
jsii_type="aws-cdk-lib.aws_chatbot.SlackChannelConfiguration",
):
'''A new Slack channel configuration.
:exampleMetadata: infused
Example::
import aws_cdk.aws_chatbot as chatbot
# project: codebuild.Project
target = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
slack_channel_configuration_name="YOUR_CHANNEL_NAME",
slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
slack_channel_id="YOUR_SLACK_CHANNEL_ID"
)
rule = project.notify_on_build_succeeded("NotifyOnBuildSucceeded", target)
'''
def __init__(
self,
scope: constructs.Construct,
id: builtins.str,
*,
slack_channel_configuration_name: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
logging_level: typing.Optional[LoggingLevel] = None,
log_retention: typing.Optional[_RetentionDays_070f99f0] = None,
log_retention_retry_options: typing.Optional[_LogRetentionRetryOptions_62d80a14] = None,
log_retention_role: typing.Optional[_IRole_235f5d8e] = None,
notification_topics: typing.Optional[typing.Sequence[_ITopic_9eca4852]] = None,
role: typing.Optional[_IRole_235f5d8e] = None,
) -> None:
'''
:param scope: -
:param id: -
:param slack_channel_configuration_name: The name of Slack channel configuration.
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ABCBBLZZZ.
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot. To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in Setting Up AWS Chatbot with Slack in the AWS Chatbot User Guide.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Default: LoggingLevel.NONE
:param log_retention: The number of days log events are kept in CloudWatch Logs. When updating this property, unsetting it doesn't remove the log retention policy. To remove the retention policy, set the value to ``INFINITE``. Default: logs.RetentionDays.INFINITE
:param log_retention_retry_options: When log retention is specified, a custom resource attempts to create the CloudWatch log group. These options control the retry policy when interacting with CloudWatch APIs. Default: - Default AWS SDK retry options.
:param log_retention_role: The IAM role for the Lambda function associated with the custom resource that sets the retention policy. Default: - A new role is created.
:param notification_topics: The SNS topics that deliver notifications to AWS Chatbot. Default: None
:param role: The permission role of Slack channel configuration. Default: - A role will be created.
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.__init__)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
props = SlackChannelConfigurationProps(
slack_channel_configuration_name=slack_channel_configuration_name,
slack_channel_id=slack_channel_id,
slack_workspace_id=slack_workspace_id,
logging_level=logging_level,
log_retention=log_retention,
log_retention_retry_options=log_retention_retry_options,
log_retention_role=log_retention_role,
notification_topics=notification_topics,
role=role,
)
jsii.create(self.__class__, self, [scope, id, props])
@jsii.member(jsii_name="fromSlackChannelConfigurationArn") # type: ignore[misc]
@builtins.classmethod
def from_slack_channel_configuration_arn(
cls,
scope: constructs.Construct,
id: builtins.str,
slack_channel_configuration_arn: builtins.str,
) -> ISlackChannelConfiguration:
'''Import an existing Slack channel configuration, given its ARN.
:param scope: The parent creating construct.
:param id: The construct's name.
:param slack_channel_configuration_arn: configuration ARN (i.e. arn:aws:chatbot::1234567890:chat-configuration/slack-channel/my-slack).
:return: a reference to the existing Slack channel configuration
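Example (illustrative values, following the ARN format above, and assuming ``import aws_cdk.aws_chatbot as chatbot`` as in the other examples)::
imported = chatbot.SlackChannelConfiguration.from_slack_channel_configuration_arn(
self, "ImportedSlackChannel",
"arn:aws:chatbot::1234567890:chat-configuration/slack-channel/my-slack"
)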
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.from_slack_channel_configuration_arn)
check_type(argname="argument scope", value=scope, expected_type=type_hints["scope"])
check_type(argname="argument id", value=id, expected_type=type_hints["id"])
check_type(argname="argument slack_channel_configuration_arn", value=slack_channel_configuration_arn, expected_type=type_hints["slack_channel_configuration_arn"])
return typing.cast(ISlackChannelConfiguration, jsii.sinvoke(cls, "fromSlackChannelConfigurationArn", [scope, id, slack_channel_configuration_arn]))
@jsii.member(jsii_name="metricAll") # type: ignore[misc]
@builtins.classmethod
def metric_all(
cls,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for All SlackChannelConfigurations.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.metric_all)
check_type(argname="argument metric_name", value=metric_name, expected_type=type_hints["metric_name"])
props = _MetricOptions_1788b62f(
account=account,
color=color,
dimensions_map=dimensions_map,
label=label,
period=period,
region=region,
statistic=statistic,
unit=unit,
)
return typing.cast(_Metric_e396a4dc, jsii.sinvoke(cls, "metricAll", [metric_name, props]))
@jsii.member(jsii_name="addNotificationTopic")
def add_notification_topic(self, notification_topic: _ITopic_9eca4852) -> None:
'''Adds an SNS topic that delivers notifications to AWS Chatbot.
:param notification_topic: -
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.add_notification_topic)
check_type(argname="argument notification_topic", value=notification_topic, expected_type=type_hints["notification_topic"])
return typing.cast(None, jsii.invoke(self, "addNotificationTopic", [notification_topic]))
@jsii.member(jsii_name="addToRolePolicy")
def add_to_role_policy(self, statement: _PolicyStatement_0fe33853) -> None:
'''Adds extra permissions to the IAM role of the Slack channel configuration.
:param statement: -
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.add_to_role_policy)
check_type(argname="argument statement", value=statement, expected_type=type_hints["statement"])
return typing.cast(None, jsii.invoke(self, "addToRolePolicy", [statement]))
@jsii.member(jsii_name="bindAsNotificationRuleTarget")
def bind_as_notification_rule_target(
self,
_scope: constructs.Construct,
) -> _NotificationRuleTargetConfig_ea27e095:
'''Returns a target configuration for notification rule.
:param _scope: -
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.bind_as_notification_rule_target)
check_type(argname="argument _scope", value=_scope, expected_type=type_hints["_scope"])
return typing.cast(_NotificationRuleTargetConfig_ea27e095, jsii.invoke(self, "bindAsNotificationRuleTarget", [_scope]))
@jsii.member(jsii_name="metric")
def metric(
self,
metric_name: builtins.str,
*,
account: typing.Optional[builtins.str] = None,
color: typing.Optional[builtins.str] = None,
dimensions_map: typing.Optional[typing.Mapping[builtins.str, builtins.str]] = None,
label: typing.Optional[builtins.str] = None,
period: typing.Optional[_Duration_4839e8c3] = None,
region: typing.Optional[builtins.str] = None,
statistic: typing.Optional[builtins.str] = None,
unit: typing.Optional[_Unit_61bc6f70] = None,
) -> _Metric_e396a4dc:
'''Return the given named metric for this SlackChannelConfiguration.
:param metric_name: -
:param account: Account which this metric comes from. Default: - Deployment account.
:param color: The hex color code, prefixed with '#' (e.g. '#00ff00'), to use when this metric is rendered on a graph. The ``Color`` class has a set of standard colors that can be used here. Default: - Automatic color
:param dimensions_map: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard. You can use `dynamic labels <https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/graph-dynamic-labels.html>`_ to show summary information about the entire displayed time series in the legend. For example, if you use:: [max: ${MAX}] MyMetric As the metric label, the maximum value in the visible range will be shown next to the time series name in the graph's legend. Default: - No label
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: - Deployment region.
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: - All metric datums in the given metric stream
'''
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfiguration.metric)
check_type(argname="argument metric_name", value=metric_name, expected_type=type_hints["metric_name"])
props = _MetricOptions_1788b62f(
account=account,
color=color,
dimensions_map=dimensions_map,
label=label,
period=period,
region=region,
statistic=statistic,
unit=unit,
)
return typing.cast(_Metric_e396a4dc, jsii.invoke(self, "metric", [metric_name, props]))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="grantPrincipal")
def grant_principal(self) -> _IPrincipal_539bb2fd:
'''The principal to grant permissions to.'''
return typing.cast(_IPrincipal_539bb2fd, jsii.get(self, "grantPrincipal"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationArn")
def slack_channel_configuration_arn(self) -> builtins.str:
'''The ARN of the Slack channel configuration, in the form of arn:aws:chatbot:{region}:{account}:chat-configuration/slack-channel/{slackChannelName}.'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="slackChannelConfigurationName")
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.'''
return typing.cast(builtins.str, jsii.get(self, "slackChannelConfigurationName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.'''
return typing.cast(typing.Optional[_IRole_235f5d8e], jsii.get(self, "role"))
@jsii.data_type(
jsii_type="aws-cdk-lib.aws_chatbot.SlackChannelConfigurationProps",
jsii_struct_bases=[],
name_mapping={
"slack_channel_configuration_name": "slackChannelConfigurationName",
"slack_channel_id": "slackChannelId",
"slack_workspace_id": "slackWorkspaceId",
"logging_level": "loggingLevel",
"log_retention": "logRetention",
"log_retention_retry_options": "logRetentionRetryOptions",
"log_retention_role": "logRetentionRole",
"notification_topics": "notificationTopics",
"role": "role",
},
)
class SlackChannelConfigurationProps:
def __init__(
self,
*,
slack_channel_configuration_name: builtins.str,
slack_channel_id: builtins.str,
slack_workspace_id: builtins.str,
logging_level: typing.Optional[LoggingLevel] = None,
log_retention: typing.Optional[_RetentionDays_070f99f0] = None,
log_retention_retry_options: typing.Optional[_LogRetentionRetryOptions_62d80a14] = None,
log_retention_role: typing.Optional[_IRole_235f5d8e] = None,
notification_topics: typing.Optional[typing.Sequence[_ITopic_9eca4852]] = None,
role: typing.Optional[_IRole_235f5d8e] = None,
) -> None:
'''Properties for a new Slack channel configuration.
:param slack_channel_configuration_name: The name of Slack channel configuration.
:param slack_channel_id: The ID of the Slack channel. To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link. The channel ID is the 9-character string at the end of the URL. For example, ABCBBLZZZ.
:param slack_workspace_id: The ID of the Slack workspace authorized with AWS Chatbot. To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console. Then you can copy and paste the workspace ID from the console. For more details, see steps 1-4 in Setting Up AWS Chatbot with Slack in the AWS Chatbot User Guide.
:param logging_level: Specifies the logging level for this configuration. This property affects the log entries pushed to Amazon CloudWatch Logs. Default: LoggingLevel.NONE
:param log_retention: The number of days log events are kept in CloudWatch Logs. When updating this property, unsetting it doesn't remove the log retention policy. To remove the retention policy, set the value to ``INFINITE``. Default: logs.RetentionDays.INFINITE
:param log_retention_retry_options: When log retention is specified, a custom resource attempts to create the CloudWatch log group. These options control the retry policy when interacting with CloudWatch APIs. Default: - Default AWS SDK retry options.
:param log_retention_role: The IAM role for the Lambda function associated with the custom resource that sets the retention policy. Default: - A new role is created.
:param notification_topics: The SNS topics that deliver notifications to AWS Chatbot. Default: None
:param role: The permission role of Slack channel configuration. Default: - A role will be created.
:exampleMetadata: infused
Example::
import aws_cdk.aws_chatbot as chatbot
# project: codebuild.Project
target = chatbot.SlackChannelConfiguration(self, "MySlackChannel",
slack_channel_configuration_name="YOUR_CHANNEL_NAME",
slack_workspace_id="YOUR_SLACK_WORKSPACE_ID",
slack_channel_id="YOUR_SLACK_CHANNEL_ID"
)
rule = project.notify_on_build_succeeded("NotifyOnBuildSucceeded", target)
'''
if isinstance(log_retention_retry_options, dict):
log_retention_retry_options = _LogRetentionRetryOptions_62d80a14(**log_retention_retry_options)
if __debug__:
type_hints = typing.get_type_hints(SlackChannelConfigurationProps.__init__)
check_type(argname="argument slack_channel_configuration_name", value=slack_channel_configuration_name, expected_type=type_hints["slack_channel_configuration_name"])
check_type(argname="argument slack_channel_id", value=slack_channel_id, expected_type=type_hints["slack_channel_id"])
check_type(argname="argument slack_workspace_id", value=slack_workspace_id, expected_type=type_hints["slack_workspace_id"])
check_type(argname="argument logging_level", value=logging_level, expected_type=type_hints["logging_level"])
check_type(argname="argument log_retention", value=log_retention, expected_type=type_hints["log_retention"])
check_type(argname="argument log_retention_retry_options", value=log_retention_retry_options, expected_type=type_hints["log_retention_retry_options"])
check_type(argname="argument log_retention_role", value=log_retention_role, expected_type=type_hints["log_retention_role"])
check_type(argname="argument notification_topics", value=notification_topics, expected_type=type_hints["notification_topics"])
check_type(argname="argument role", value=role, expected_type=type_hints["role"])
self._values: typing.Dict[str, typing.Any] = {
"slack_channel_configuration_name": slack_channel_configuration_name,
"slack_channel_id": slack_channel_id,
"slack_workspace_id": slack_workspace_id,
}
if logging_level is not None:
self._values["logging_level"] = logging_level
if log_retention is not None:
self._values["log_retention"] = log_retention
if log_retention_retry_options is not None:
self._values["log_retention_retry_options"] = log_retention_retry_options
if log_retention_role is not None:
self._values["log_retention_role"] = log_retention_role
if notification_topics is not None:
self._values["notification_topics"] = notification_topics
if role is not None:
self._values["role"] = role
@builtins.property
def slack_channel_configuration_name(self) -> builtins.str:
'''The name of Slack channel configuration.'''
result = self._values.get("slack_channel_configuration_name")
assert result is not None, "Required property 'slack_channel_configuration_name' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_channel_id(self) -> builtins.str:
'''The ID of the Slack channel.
To get the ID, open Slack, right click on the channel name in the left pane, then choose Copy Link.
The channel ID is the 9-character string at the end of the URL. For example, ABCBBLZZZ.
'''
result = self._values.get("slack_channel_id")
assert result is not None, "Required property 'slack_channel_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def slack_workspace_id(self) -> builtins.str:
'''The ID of the Slack workspace authorized with AWS Chatbot.
To get the workspace ID, you must perform the initial authorization flow with Slack in the AWS Chatbot console.
Then you can copy and paste the workspace ID from the console.
For more details, see steps 1-4 in Setting Up AWS Chatbot with Slack in the AWS Chatbot User Guide.
:see: https://docs.aws.amazon.com/chatbot/latest/adminguide/setting-up.html#Setup_intro
'''
result = self._values.get("slack_workspace_id")
assert result is not None, "Required property 'slack_workspace_id' is missing"
return typing.cast(builtins.str, result)
@builtins.property
def logging_level(self) -> typing.Optional[LoggingLevel]:
'''Specifies the logging level for this configuration.
This property affects the log entries pushed to Amazon CloudWatch Logs.
:default: LoggingLevel.NONE
'''
result = self._values.get("logging_level")
return typing.cast(typing.Optional[LoggingLevel], result)
@builtins.property
def log_retention(self) -> typing.Optional[_RetentionDays_070f99f0]:
'''The number of days log events are kept in CloudWatch Logs.
When updating
this property, unsetting it doesn't remove the log retention policy. To
remove the retention policy, set the value to ``INFINITE``.
:default: logs.RetentionDays.INFINITE
'''
result = self._values.get("log_retention")
return typing.cast(typing.Optional[_RetentionDays_070f99f0], result)
@builtins.property
def log_retention_retry_options(
self,
) -> typing.Optional[_LogRetentionRetryOptions_62d80a14]:
'''When log retention is specified, a custom resource attempts to create the CloudWatch log group.
These options control the retry policy when interacting with CloudWatch APIs.
:default: - Default AWS SDK retry options.
'''
result = self._values.get("log_retention_retry_options")
return typing.cast(typing.Optional[_LogRetentionRetryOptions_62d80a14], result)
@builtins.property
def log_retention_role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The IAM role for the Lambda function associated with the custom resource that sets the retention policy.
:default: - A new role is created.
'''
result = self._values.get("log_retention_role")
return typing.cast(typing.Optional[_IRole_235f5d8e], result)
@builtins.property
def notification_topics(self) -> typing.Optional[typing.List[_ITopic_9eca4852]]:
'''The SNS topics that deliver notifications to AWS Chatbot.
:default: None
'''
result = self._values.get("notification_topics")
return typing.cast(typing.Optional[typing.List[_ITopic_9eca4852]], result)
@builtins.property
def role(self) -> typing.Optional[_IRole_235f5d8e]:
'''The permission role of Slack channel configuration.
:default: - A role will be created.
'''
result = self._values.get("role")
return typing.cast(typing.Optional[_IRole_235f5d8e], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "SlackChannelConfigurationProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
__all__ = [
"CfnSlackChannelConfiguration",
"CfnSlackChannelConfigurationProps",
"ISlackChannelConfiguration",
"LoggingLevel",
"SlackChannelConfiguration",
"SlackChannelConfigurationProps",
]
publication.publish()
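# Illustrative usage sketch (not part of the generated bindings above): shows
# how the SlackChannelConfiguration construct and its metric() helper might be
# used inside a CDK stack. The construct id, workspace/channel ids, and the
# metric name are hypothetical placeholders, not values from this module.
def _example_slack_channel(scope) -> None:
    import aws_cdk.aws_chatbot as chatbot
    from aws_cdk import Duration

    channel = chatbot.SlackChannelConfiguration(
        scope, "AlertsChannel",
        slack_channel_configuration_name="alerts",  # hypothetical name
        slack_workspace_id="T0000000000",           # hypothetical workspace ID
        slack_channel_id="C0000000000",             # hypothetical channel ID
        logging_level=chatbot.LoggingLevel.ERROR,
    )
    # metric() returns a CloudWatch Metric scoped to this configuration; the
    # statistic and period keywords map to the MetricOptions described above.
    channel.metric("EventsProcessed",               # placeholder metric name
                   statistic="Sum",
                   period=Duration.minutes(5))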
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import uuid
import mock
import mox
import six
from heat.common import exception
from heat.common import template_format
from heat.engine import resource
from heat.engine import rsrc_defn
from heat.engine import scheduler
from heat.tests import common
from heat.tests import utils
from ..resources import cloud_loadbalancer as lb # noqa
# The following fakes are for pyrax
cert = """\n-----BEGIN CERTIFICATE-----
MIIFBjCCAu4CCQDWdcR5LY/+/jANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJB
VTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0
cyBQdHkgTHRkMB4XDTE0MTAxNjE3MDYxNVoXDTE1MTAxNjE3MDYxNVowRTELMAkG
A1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0
IFdpZGdpdHMgUHR5IEx0ZDCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB
AMm5NcP0tMKHblT6Ud1k8TxZ9/8uOHwUNPbvFsvSyCupj0J0vGCTjbuC2I5T/CXR
tnLEIt/EarlNAqcjbDCWtSyEKs3zDmmkreoIDEa8pyAQ2ycsCXGMxDN97F3/wlLZ
agUNM0FwGHLZWBg62bM6l+bpTUcX0PqSyv/aVMhJ8EPDX0Dx1RYsVwUzIe/HWC7x
vCmtDApAp1Fwq7AwlRaKU17sGwPWJ8+I8PyouBdqNuslHm7LQ0XvBA5DfkQA6feB
ZeJIyOtctM9WFWQI5fKOsyt5P306B3Zztw9VZLAmZ8qHex+R1WY1zXxDAwKEQz/X
8bRqMA/VU8OxJcK0AmY/1v/TFmAlRh2XBCIc+5UGtCcftWvZJAsKur8Hg5pPluGv
ptyqSgSsSKtOVWkyTANP1LyOkpBA8Kmkeo2CKXu1SCFypY5Q6E+Fy8Y8RaHJPvzR
NHcm1tkBvHOKyRso6FjvxuJEyIC9EyUK010nwQm7Qui11VgCSHBoaKVvkIbFfQdK
aCes0oQO5dqY0+fC/IFDhrxlvSd2Wk7KjuNjNu9kVN9Ama2pRTxhYKaN+GsHfoL7
ra6G9HjbUVULAdjCko3zOKEUzFLLf1VZYk7hDhyv9kovk0b8sr5WowxW7+9Wy0NK
WL5f2QgVCcoHw9bGhyuYQCdBfztNmKOWe9pGj6bQAx4pAgMBAAEwDQYJKoZIhvcN
AQEFBQADggIBALFSj3G2TEL/UWtNcPeY2fbxSGBrboFx3ur8+zTkdZzvfC8H9/UK
w0aRH0rK4+lKYDqF6A9bUHP17DaJm1lF9In38VVMOuur0ehUIn1S2U3OvlDLN68S
p5D4wGKMcUfUQ6pzhSKJCMvGX561TKHCc5fZhPruy75Xq2DcwJENE189foKLFvJs
ca4sIARqP6v1vfARcfH5leSsdIq8hy6VfL0BRATXfNHZh4SNbyDJYYTxrEUPHYXW
pzW6TziZXYNMG2ZRdHF/mDJuFzw2EklOrPC9MySCZv2i9swnqyuwNYh/SAMhodTv
ZDGy4nbjWNe5BflTMBceh45VpyTcnQulFhZQFwP79fK10BoDrOc1mEefhIqT+fPI
LJepLOf7CSXtYBcWbmMCLHNh+PrlCiA1QMTyd/AC1vvoiyCbs3M419XbXcBSDEh8
tACplmhf6z1vDkElWiDr8y0kujJ/Gie24iLTun6oHG+f+o6bbQ9w196T0olLcGx0
oAYL0Olqli6cWHhraVAzZ5t5PH4X9TiESuQ+PMjqGImCIUscXY4objdnB5dfPHoz
eF5whPl36/GK8HUixCibkCyqEOBBuNqhOz7nVLM0eg5L+TE5coizEBagxVCovYSj
fQ9zkIgaC5oeH6L0C1FFG1vRNSWokheBk14ztVoJCJyFr6p0/6pD7SeR
-----END CERTIFICATE-----\n"""
private_key = """\n-----BEGIN PRIVATE KEY-----
MIIJRAIBADANBgkqhkiG9w0BAQEFAASCCS4wggkqAgEAAoICAQDJuTXD9LTCh25U
+lHdZPE8Wff/Ljh8FDT27xbL0sgrqY9CdLxgk427gtiOU/wl0bZyxCLfxGq5TQKn
I2wwlrUshCrN8w5ppK3qCAxGvKcgENsnLAlxjMQzfexd/8JS2WoFDTNBcBhy2VgY
OtmzOpfm6U1HF9D6ksr/2lTISfBDw19A8dUWLFcFMyHvx1gu8bwprQwKQKdRcKuw
MJUWilNe7BsD1ifPiPD8qLgXajbrJR5uy0NF7wQOQ35EAOn3gWXiSMjrXLTPVhVk
COXyjrMreT99Ogd2c7cPVWSwJmfKh3sfkdVmNc18QwMChEM/1/G0ajAP1VPDsSXC
tAJmP9b/0xZgJUYdlwQiHPuVBrQnH7Vr2SQLCrq/B4OaT5bhr6bcqkoErEirTlVp
MkwDT9S8jpKQQPCppHqNgil7tUghcqWOUOhPhcvGPEWhyT780TR3JtbZAbxziskb
KOhY78biRMiAvRMlCtNdJ8EJu0LotdVYAkhwaGilb5CGxX0HSmgnrNKEDuXamNPn
wvyBQ4a8Zb0ndlpOyo7jYzbvZFTfQJmtqUU8YWCmjfhrB36C+62uhvR421FVCwHY
wpKN8zihFMxSy39VWWJO4Q4cr/ZKL5NG/LK+VqMMVu/vVstDSli+X9kIFQnKB8PW
xocrmEAnQX87TZijlnvaRo+m0AMeKQIDAQABAoICAA8DuBrDxgiMqAuvLhS6hLIn
SCw4NoAVyPNwTFQTdk65qi4aHkNZ+DyyuoetfKEcAOZ97tKU/hSYxM/H9S+QqB+O
HtmBc9stJLy8qJ1DQXVDi+xYfMN05M2oW8WLWd1szVVe7Ce8vjUeNE5pYvbSL6hC
STw3a5ibAH0WtSTLTBTfH+HnniKuXjPG4InGXqvv1j+L38+LjGilaEIO+6nX1ejE
ziX09LWfzcAglsM3ZqsN8jvw6Sr1ZWniYC2Tm9aOTRUQsdPC7LpZ//GYL/Vj5bYg
qjcZ8KBCcKe1hW8PDL6oYuOwqR+YdZkAK+MuEQtZeWYiWT10dW2la9gYKe2OZuQ1
7q3zZ6zLP+XP+0N7DRMTTuk2gurBVX7VldzIzvjmW8X+8Q5QO+EAqKr2yordK3S1
uYcKmyL4Nd6rSFjRo0zSqHMNOyKt3b1r3m/eR2W623rT5uTjgNYpiwCNxnxmcjpK
Sq7JzZKz9NLbEKQWsP9gQ3G6pp3XfLtoOHEDkSKMmQxd8mzK6Ja/9iC+JGqRTJN+
STe1vL9L2DC7GnjOH1h2TwLoLtQWSGebf/GBxju0e5pAL0UYWBNjAwcpOoRU9J5J
y9E7sNbbXTmK2rg3B/5VKGQckBWfurg7CjAmHGgz9xxceJQLKvT1O5zHZc+v4TVB
XDZjtz8L2k3wFLDynDY5AoIBAQDm2fFgx4vk+gRFXPoLNN34Jw2fT+xuwD/H7K0e
0Cas0NfyNil/Kbp+rhMHuVXTt86BIY+z8GO4wwn+YdDgihBwobAh2G9T/P6wNm+Q
NcIeRioml8V/CP7lOQONQJ6sLTRYnNLfB96uMFe+13DO/PjFybee5VflfBUrJK1M
DqRLwm9wEIf5p0CWYI/ZJaDNN71B09BB/jdT/e7Ro1hXHlq3W4tKqRDPfuUqwy3H
ocYQ1SUk3oFdSiYFd6PijNkfTnrtyToa0xUL9uGL+De1LfgV+uvqkOduQqnpm/5+
XQC1qbTUjq+4WEsuPjYf2E0WAVFGzwzWcdb0LnMIUJHwPvpLAoIBAQDfsvCZlcFM
nGBk1zUnV3+21CPK+5+X3zLHr/4otQHlGMFL6ZiQManvKMX6a/cT3rG+LvECcXGD
jSsTu7JIt9l8VTpbPaS76htTmQYaAZERitBx1C8zDMuI2O4bjFLUGUX73RyTZdRm
G68IX+7Q7SL8zr/fHjcnk+3yj0L1soAVPC7lY3se7vQ/SCre97E+noP5yOhrpnRt
dij7NYy79xcvUZfc/z0//Ia4JSCcIvv2HO7JZIPzUCVO4sjbUOGsgR9pwwQkwYeP
b5P0MVaPgFnOgo/rz6Uqe+LpeY83SUwc2q8W8bskzTLZEnwSV5bxCY+gIn9KCZSG
8QxuftgIiQDbAoIBAQDQ2oTC5kXulzOd/YxK7z2S8OImLAzf9ha+LaZCplcXKqr0
e4P3hC0xxxN4fXjk3vp5YX+9b9MIqYw1FRIA02gkPmQ3erTd65oQmm88rSY+dYRU
/iKz19OkVnycIsZrR0qAkQFGvrv8I8h+5DMvUTdQ2jrCCwQGnsgYDEqs8OI7mGFx
pcMfXu3UHvCFqMFeaPtUvuk/i1tLJgYWrA2UY+X21V+j4GlREKEMmyCj5/xl5jCA
tr2bRSY49BDVOlCFPl+BGfjzo9z6whU0qRDdXgWA/U7LHOYEn1NSAsuwTzwBHtR3
KdBYm6kI4Ufeb7buHasGwPQAX2X17MAt2ZbvIEsZAoIBAQC4g5dzh5PGhmH4K48b
YU/l1TukzUIJekAfd+ozV4I1nuKppAeEQILD0yTh9zX4vMJtdbiz5DDWapWylCpt
UsBgjsgwxDriCSr7HIhs4QfwqUhf67325MHpoc1dCbS0YBhatDpC1kaI5qLMTJzm
1gL69epLtleWHK2zWjnIAbEmUtr3uMOwczciD3vVKAeZ+BQx72bOjKESPNl2w+fO
jvQfwrR5xEqYQco5j95DC5Q6oAjSM0enZV8wn10/kYpjyKnJieMcEkmnpUgrrpqQ
iTUKYqUlw8OftEopfGwGFT5junmbek57/4nGhTmzw22sac9/LZVC034ghClV5uh4
udDrAoIBAQCJHfBPJmJMT/WtSATTceVDgZiyezWNgH2yLJMqDP6sEuImnLAg2L9M
Yc6LqMcHLj7CyXfy2AEAuYTZwXFSRmVKl6Ycad7sS/hIL1ykvDveRU9VNImexDBq
AJR4GKr6jbRZnBztnRYZTsGA+TcrFc6SwdSPXgz7JQT9uw+JkhLi59m141XBdeRc
NQ/LFgOaxjvRUID81izQaYEyADId7asy+2QVazMDafuALJ23WSUMSXajCXaC6/7N
53RWrOAb+kFRgjuHM8pQkpgnY/Ds0MZxpakFw3Y7PAEL99xyYdR+rE3JOMjPlgr0
LpTt0Xs1OFZxaNpolW5Qis4os7UmmIRV
-----END PRIVATE KEY-----\n"""
class FakeException(Exception):
pass
class FakeClient(object):
user_agent = "Fake"
USER_AGENT = "Fake"
class FakeManager(object):
api = FakeClient()
def list(self):
pass
def get(self, item):
pass
def delete(self, item):
pass
def create(self, *args, **kwargs):
pass
def find(self, *args, **kwargs):
pass
def action(self, item, action_type, body=None):
pass
class FakeLoadBalancerManager(object):
def __init__(self, api=None, *args, **kwargs):
pass
def set_content_caching(self, *args, **kwargs):
pass
class FakeNode(object):
def __init__(self, address=None, port=None, condition=None, weight=None,
status=None, parent=None, type=None, id=None):
if not (address and port):
# This mimics the check that pyrax does on Node instantiation
raise TypeError("You must include an address and "
"a port when creating a node.")
self.address = address
self.port = port
self.condition = condition
self.weight = weight
self.status = status
self.parent = parent
self.type = type
self.id = id
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
def update(self):
pass
def delete(self):
pass
class FakeVirtualIP(object):
def __init__(self, address=None, port=None, condition=None,
ipVersion=None, type=None, id=None):
self.address = address
self.port = port
self.condition = condition
self.ipVersion = ipVersion
self.type = type
self.id = id
self.ip_version = ipVersion
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self.__eq__(other)
class FakeLoadBalancerClient(object):
def __init__(self, *args, **kwargs):
self.Node = FakeNode
self.VirtualIP = FakeVirtualIP
pass
def get(self, *args, **kwargs):
pass
def create(self, *args, **kwargs):
pass
class FakeLoadBalancer(object):
def __init__(self, name=None, info=None, *args, **kwargs):
name = name or uuid.uuid4()
info = info or {"fake": "fake"}
self.id = uuid.uuid4()
self.manager = FakeLoadBalancerManager()
self.Node = FakeNode
self.VirtualIP = FakeVirtualIP
self.nodes = []
self.algorithm = "ROUND_ROBIN"
self.session_persistence = "HTTP_COOKIE"
self.connection_logging = False
self.timeout = None
self.httpsRedirect = False
self.protocol = None
self.port = None
self.name = None
self.halfClosed = None
self.content_caching = False
def get(self, *args, **kwargs):
pass
def add_nodes(self, *args, **kwargs):
pass
def add_ssl_termination(self, *args, **kwargs):
pass
def set_error_page(self, *args, **kwargs):
pass
def clear_error_page(self, *args, **kwargs):
pass
def add_access_list(self, *args, **kwargs):
pass
def update(self, *args, **kwargs):
pass
def add_health_monitor(self, *args, **kwargs):
pass
def delete_health_monitor(self, *args, **kwargs):
pass
def delete_ssl_termination(self, *args, **kwargs):
pass
def set_metadata(self, *args, **kwargs):
pass
def delete_metadata(self, *args, **kwargs):
pass
def add_connection_throttle(self, *args, **kwargs):
pass
def delete_connection_throttle(self, *args, **kwargs):
pass
def delete(self, *args, **kwargs):
pass
def get_health_monitor(self, *args, **kwargs):
return {}
def get_metadata(self, *args, **kwargs):
return {}
def get_error_page(self, *args, **kwargs):
pass
def get_connection_throttle(self, *args, **kwargs):
pass
def get_ssl_termination(self, *args, **kwargs):
pass
def get_access_list(self, *args, **kwargs):
pass
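# Subclass the real Heat resource but return the fake pyrax client above, so
# the tests exercise the resource logic without touching a live API.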
class LoadBalancerWithFakeClient(lb.CloudLoadBalancer):
def cloud_lb(self):
return FakeLoadBalancerClient()
def override_resource():
return {
'Rackspace::Cloud::LoadBalancer': LoadBalancerWithFakeClient
}
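# The tests below follow the classic mox record/replay cycle: expectations are
# recorded on stubbed clients, self.m.ReplayAll() switches to replay mode, and
# self.m.VerifyAll() asserts that every recorded call actually happened.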
class LoadBalancerTest(common.HeatTestCase):
def setUp(self):
super(LoadBalancerTest, self).setUp()
self.lb_template = {
"AWSTemplateFormatVersion": "2010-09-09",
"Description": "fawef",
"Resources": {
self._get_lb_resource_name(): {
"Type": "Rackspace::Cloud::LoadBalancer",
"Properties": {
"name": "test-clb",
"nodes": [{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED"}],
"protocol": "HTTP",
"port": 80,
"virtualIps": [
{"type": "PUBLIC", "ipVersion": "IPV6"}],
"algorithm": 'LEAST_CONNECTIONS',
"connectionThrottle": {'maxConnectionRate': 1000},
'timeout': 110,
'contentCaching': 'DISABLED'
}
}
}
}
self.lb_name = 'test-clb'
self.expected_body = {
"nodes": [FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED", type=u"PRIMARY",
weight=1)],
"protocol": u'HTTP',
"port": 80,
"virtual_ips": [FakeVirtualIP(type=u"PUBLIC", ipVersion=u"IPV6")],
"algorithm": u'LEAST_CONNECTIONS',
"connectionThrottle": {'maxConnectionRate': 1000,
'maxConnections': None,
'rateInterval': None,
'minConnections': None},
"connectionLogging": None,
"halfClosed": None,
"healthMonitor": None,
"metadata": None,
"sessionPersistence": None,
"timeout": 110,
"httpsRedirect": False
}
lb.resource_mapping = override_resource
resource._register_class("Rackspace::Cloud::LoadBalancer",
LoadBalancerWithFakeClient)
def _get_lb_resource_name(self):
return "lb-" + str(uuid.uuid4())
def __getattribute__(self, name):
if name == 'expected_body' or name == 'lb_template':
return copy.deepcopy(super(LoadBalancerTest, self)
.__getattribute__(name))
return super(LoadBalancerTest, self).__getattribute__(name)
def _mock_create(self, tmpl, stack, resource_name, lb_name, lb_body):
resource_defns = tmpl.resource_definitions(stack)
rsrc = LoadBalancerWithFakeClient(resource_name,
resource_defns[resource_name],
stack)
fake_lb = FakeLoadBalancer(name=lb_name)
fake_lb.status = 'ACTIVE'
fake_lb.resource_id = 1234
self.m.StubOutWithMock(rsrc.clb, 'create')
rsrc.clb.create(lb_name, **lb_body).AndReturn(fake_lb)
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
return (rsrc, fake_lb)
def _get_first_resource_name(self, templ):
return next(k for k in templ['Resources'])
def _mock_loadbalancer(self, lb_template, expected_name, expected_body):
t = template_format.parse(json.dumps(lb_template))
self.stack = utils.parse_stack(t, stack_name=utils.random_name())
rsrc, fake_lb = self._mock_create(self.stack.t, self.stack,
self._get_first_resource_name(lb_template),
expected_name,
expected_body)
return (rsrc, fake_lb)
def _set_template(self, templ, **kwargs):
for k, v in six.iteritems(kwargs):
templ['Resources'][self._get_first_resource_name(templ)][
'Properties'][k] = v
return templ
def _set_expected(self, expected, **kwargs):
for k, v in six.iteritems(kwargs):
expected[k] = v
return expected
def test_process_node(self):
nodes = [{'addresses': ['1234'], 'port': 80, 'enabled': True},
{'addresses': ['4567', '8901', '8903'], 'port': 80,
'enabled': True},
{'addresses': [], 'port': 80, 'enabled': True}]
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
expected_nodes = [{'address': '1234', 'port': 80, 'enabled': True},
{'address': '4567', 'port': 80, 'enabled': True},
{'address': '8901', 'port': 80, 'enabled': True},
{'address': '8903', 'port': 80, 'enabled': True}]
self.assertEqual(expected_nodes, list(rsrc._process_nodes(nodes)))
def test_nodeless(self):
"""It's possible to create a LoadBalancer resource with no nodes."""
template = self._set_template(self.lb_template,
nodes=[])
expected_body = copy.deepcopy(self.expected_body)
expected_body['nodes'] = []
rsrc, fake_lb = self._mock_loadbalancer(
template, self.lb_name, expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_alter_properties(self):
# test alter properties functions
template = self._set_template(self.lb_template,
sessionPersistence='HTTP_COOKIE',
connectionLogging=True,
metadata={'yolo': 'heeyyy_gurl'})
expected = self._set_expected(self.expected_body,
sessionPersistence={
'persistenceType': 'HTTP_COOKIE'},
connectionLogging={'enabled': True},
metadata=[
{'key': 'yolo',
'value': 'heeyyy_gurl'}])
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_validate_vip(self):
snippet = {
"nodes": [],
"protocol": 'HTTP',
"port": 80,
"halfClosed": None,
"algorithm": u'LEAST_CONNECTIONS',
"virtualIps": [{"id": "1234"}]
}
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
# happy path
resdef = rsrc_defn.ResourceDefinition("testvip",
lb.CloudLoadBalancer,
properties=snippet)
rsrc = lb.CloudLoadBalancer("testvip", resdef, stack)
self.assertIsNone(rsrc.validate())
# make sure the vip id prop is exclusive
snippet["virtualIps"][0]["type"] = "PUBLIC"
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn("Cannot specify type or version", str(exc))
# make sure you have to specify type and version if no id
snippet["virtualIps"] = [{}]
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn("Must specify VIP type and version", str(exc))
def test_validate_half_closed(self):
# test failure (invalid protocol)
template = self._set_template(self.lb_template, halfClosed=True)
expected = self._set_expected(self.expected_body, halfClosed=True)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn('The halfClosed property is only available for the TCP'
' or TCP_CLIENT_FIRST protocols', str(exc))
# test TCP protocol
template = self._set_template(template, protocol='TCP')
expected = self._set_expected(expected, protocol='TCP')
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
# test TCP_CLIENT_FIRST protocol
template = self._set_template(template,
protocol='TCP_CLIENT_FIRST')
expected = self._set_expected(expected,
protocol='TCP_CLIENT_FIRST')
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
def test_validate_health_monitor(self):
# test connect success
health_monitor = {
'type': 'CONNECT',
'attemptsBeforeDeactivation': 1,
'delay': 1,
'timeout': 1
}
template = self._set_template(self.lb_template,
healthMonitor=health_monitor)
expected = self._set_expected(self.expected_body,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
# test connect failure
# bodyRegex is only valid for type 'HTTP(S)'
health_monitor['bodyRegex'] = 'dfawefawe'
template = self._set_template(template,
healthMonitor=health_monitor)
expected = self._set_expected(expected,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed,
rsrc.validate)
self.assertIn('Unknown Property bodyRegex', str(exc))
# test http fields
health_monitor['type'] = 'HTTP'
health_monitor['bodyRegex'] = 'bodyRegex'
health_monitor['statusRegex'] = 'statusRegex'
health_monitor['hostHeader'] = 'hostHeader'
health_monitor['path'] = 'path'
template = self._set_template(template,
healthMonitor=health_monitor)
expected = self._set_expected(expected,
healthMonitor=health_monitor)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
def test_validate_ssl_termination(self):
ssl_termination = {
'privatekey': 'ewfawe',
'intermediateCertificate': 'fwaefawe',
'secureTrafficOnly': True
}
# test ssl termination enabled without required fields failure
template = self._set_template(self.lb_template,
sslTermination=ssl_termination)
expected = self._set_expected(self.expected_body,
sslTermination=ssl_termination)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
exc = self.assertRaises(exception.StackValidationFailed, rsrc.validate)
self.assertIn("Property certificate not assigned", six.text_type(exc))
ssl_termination['certificate'] = 'dfaewfwef'
template = self._set_template(template,
sslTermination=ssl_termination)
expected = self._set_expected(expected,
sslTermination=ssl_termination)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.assertIsNone(rsrc.validate())
def test_ssl_termination_unstripped_certificates(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
'certificate': ' \nfawefwea\n ',
'intermediateCertificate': "\n\nintermediate_certificate\n",
'secureTrafficOnly': False
}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_ssl_termination_intermediateCertificate_None(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
'certificate': ' \nfawefwea\n ',
'intermediateCertificate': None,
'secureTrafficOnly': False
}
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
add_ssl_termination_args = {
'securePort': 443,
'privatekey': 'afwefawe',
'certificate': ' \nfawefwea\n ',
'intermediateCertificate': '',
'secureTrafficOnly': False
}
fake_lb.add_ssl_termination(**add_ssl_termination_args)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_post_creation_access_list(self):
access_list = [{"address": '192.168.1.1/0',
'type': 'ALLOW'},
{'address': '172.165.3.43',
'type': 'DENY'}]
api_access_list = [{"address": '192.168.1.1/0', 'id': 1234,
'type': 'ALLOW'},
{'address': '172.165.3.43', 'id': 3422,
'type': 'DENY'}]
template = self._set_template(self.lb_template,
accessList=access_list)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_access_list')
fake_lb.get_access_list().AndReturn([])
fake_lb.get_access_list().AndReturn(api_access_list)
self.m.StubOutWithMock(fake_lb, 'add_access_list')
fake_lb.add_access_list(access_list)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_ref_id(self):
"""The Reference ID of the resource is the resource ID."""
template = self._set_template(self.lb_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
self.assertEqual(rsrc.resource_id, rsrc.FnGetRefId())
def test_post_creation_error_page(self):
error_page = "REALLY BIG ERROR"
template = self._set_template(self.lb_template,
errorPage=error_page)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn({u'errorpage': {u'content': u''}})
fake_lb.get_error_page().AndReturn(
{u'errorpage': {u'content': error_page}})
self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_post_creation_ssl_termination(self):
ssl_termination_template = {
'securePort': 443,
'privatekey': 'afwefawe',
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False
}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
template = self._set_template(self.lb_template,
sslTermination=ssl_termination_template)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443,
'certificate': 'fawefwea',
'intermediateCertificate': "intermediate_certificate",
'secureTrafficOnly': False,
'enabled': True,
})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_post_creation_content_caching(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)[0]
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_check(self):
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
loadbalancer = lb.CloudLoadBalancer("test", resdef, stack)
loadbalancer._add_event = mock.Mock()
mock_cloud_lb = mock.Mock()
mock_get = mock.Mock(return_value=mock_cloud_lb)
loadbalancer.clb.get = mock_get
mock_cloud_lb.status = 'ACTIVE'
scheduler.TaskRunner(loadbalancer.check)()
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('COMPLETE', loadbalancer.status)
mock_cloud_lb.status = 'FOOBAR'
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(loadbalancer.check))
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('FAILED', loadbalancer.status)
self.assertIn('FOOBAR', str(exc))
mock_get.side_effect = lb.NotFound('boom')
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(loadbalancer.check))
self.assertEqual('CHECK', loadbalancer.action)
self.assertEqual('FAILED', loadbalancer.status)
self.assertIn('boom', str(exc))
def test_update_add_node_by_address(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '172.168.1.4'
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1},
{"addresses": [expected_ip],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.nodes = [
FakeNode(address=u"172.168.1.4", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"166.78.103.141", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
]
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'add_nodes')
fake_lb.add_nodes([
fake_lb.Node(address=expected_ip,
port=80,
condition='ENABLED',
type="PRIMARY", weight=1)])
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_resolve_attr_noid(self):
stack = mock.Mock()
stack.db_resource_get.return_value = None
stack.has_cache_data.return_value = False
resdef = mock.Mock(spec=rsrc_defn.ResourceDefinition)
lbres = lb.CloudLoadBalancer("test", resdef, stack)
self.assertIsNone(lbres._resolve_attribute("PublicIp"))
def test_resolve_attr_virtualips(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.virtual_ips = [FakeVirtualIP(address='1.2.3.4',
type='PUBLIC',
ipVersion="IPv6",
id='test-id')]
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
expected = [{
'ip_version': 'IPv6',
'type': 'PUBLIC',
'id': 'test-id',
'address': '1.2.3.4'}]
self.m.ReplayAll()
self.assertEqual(expected, rsrc._resolve_attribute("virtualIps"))
self.m.VerifyAll()
def test_update_nodes_immutable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
current_nodes = [
FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"3.3.3.3", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1)
]
fake_lb.nodes = current_nodes
fake_lb.tracker = "fake_lb"
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '4.4.4.4'
update_template['Properties']['nodes'] = [
{"addresses": ["1.1.1.1"], "port": 80, "condition": "ENABLED",
"type": "PRIMARY", "weight": 1},
{"addresses": ["2.2.2.2"], "port": 80, "condition": "DISABLED",
"type": "PRIMARY", "weight": 1},
{"addresses": [expected_ip], "port": 80, "condition": "ENABLED",
"type": "PRIMARY", "weight": 1}
]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.status = "PENDING_UPDATE"
fake_lb1.tracker = "fake_lb1"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb) # ACTIVE
# Add node `expected_ip`
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1) # PENDING_UPDATE
fake_lb2 = copy.deepcopy(fake_lb1)
fake_lb2.status = "ACTIVE"
fake_lb2.nodes = [
FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"3.3.3.3", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
]
fake_lb2.tracker = "fake_lb2"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2) # ACTIVE
# Delete node 3.3.3.3
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1) # PENDING_UPDATE
fake_lb3 = copy.deepcopy(fake_lb2)
fake_lb3.status = "ACTIVE"
fake_lb3.nodes = [
FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"2.2.2.2", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1)
]
fake_lb3.tracker = "fake_lb3"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb3) # ACTIVE
# Update node 2.2.2.2
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1) # PENDING_UPDATE
fake_lb4 = copy.deepcopy(fake_lb3)
fake_lb4.status = "ACTIVE"
fake_lb4.nodes = [
FakeNode(address=u"1.1.1.1", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"2.2.2.2", port=80, condition=u"DISABLED",
type="PRIMARY", weight=1),
FakeNode(address=u"4.4.4.4", port=80, condition=u"ENABLED",
type="PRIMARY", weight=1)
]
fake_lb4.tracker = "fake_lb4"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb4) # ACTIVE
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_pending_update_status(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['name'] = "updated_name"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.name = "updated_name"
fake_lb1.status = "PENDING_UPDATE" # lb is immutable
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.name = "updated_name"
fake_lb2.status = "ACTIVE"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_immutable_exception(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['name'] = "updated_name"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb) # initial iteration
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb) # immutable
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.name = "updated_name"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1) # after update
self.m.StubOutWithMock(fake_lb, 'update')
msg = ("Load Balancer '%s' has a status of 'PENDING_UPDATE' and "
"is considered immutable." % rsrc.resource_id)
fake_lb.update(name="updated_name").AndRaise(Exception(msg))
fake_lb.update(name="updated_name").AndReturn(None)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_create_immutable_exception(self):
access_list = [{"address": '192.168.1.1/0',
'type': 'ALLOW'},
{'address': '172.165.3.43',
'type': 'DENY'}]
template = self._set_template(self.lb_template,
accessList=access_list)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_access_list')
fake_lb.get_access_list().AndReturn({})
fake_lb.get_access_list().AndReturn({})
fake_lb.get_access_list().AndReturn(access_list)
self.m.StubOutWithMock(fake_lb, 'add_access_list')
msg = ("Load Balancer '%s' has a status of 'PENDING_UPDATE' and "
"is considered immutable." % rsrc.resource_id)
fake_lb.add_access_list(access_list).AndRaise(Exception(msg))
fake_lb.add_access_list(access_list)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
def test_update_lb_name(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['name'] = "updated_name"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.name = "updated_name"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(name="updated_name")
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_multiple(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['name'] = "updated_name"
update_template['Properties']['algorithm'] = "RANDOM"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.name = "updated_name"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.algorithm = "RANDOM"
fake_lb2.name = "updated_name"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(name="updated_name", algorithm="RANDOM")
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_algorithm(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['algorithm'] = "RANDOM"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.algorithm = "ROUND_ROBIN"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'update')
fake_lb1.update(algorithm="RANDOM")
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.algorithm = "RANDOM"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_protocol(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['protocol'] = "IMAPS"
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.protocol = "IMAPS"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(protocol="IMAPS")
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_redirect(self):
template = self._set_template(
self.lb_template, protocol="HTTPS")
expected = self._set_expected(
self.expected_body, protocol="HTTPS")
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['httpsRedirect'] = True
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.httpsRedirect = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(httpsRedirect=True)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_lb_redirect_https(self):
template = self._set_template(
self.lb_template, protocol="HTTPS", httpsRedirect=True)
expected = self._set_expected(
self.expected_body, protocol="HTTPS", httpsRedirect=True)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_lb_redirect_HTTP_with_SSL_term(self):
ssl_termination_template = {
'privatekey': private_key,
'intermediateCertificate': 'fwaefawe',
'secureTrafficOnly': True,
'securePort': 443,
'certificate': cert
}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
ssl_termination_api['enabled'] = True
del ssl_termination_api['privatekey']
template = self._set_template(
self.lb_template, sslTermination=ssl_termination_template,
protocol="HTTP", httpsRedirect=True)
expected = self._set_expected(
self.expected_body, protocol="HTTP", httpsRedirect=False)
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected)
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'create')
rsrc.clb.create(self.lb_name, **expected).AndReturn(fake_lb)
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.httpsRedirect = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn(ssl_termination_api)
self.m.StubOutWithMock(fake_lb1, 'get_ssl_termination')
fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
fake_lb1.get_ssl_termination().AndReturn(ssl_termination_api)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
def test_update_lb_half_closed(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['halfClosed'] = True
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.halfClosed = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(halfClosed=True)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_port(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['port'] = 1234
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.port = 1234
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(port=1234)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_lb_timeout(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['timeout'] = 120
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.timeout = 120
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb, 'update')
fake_lb.update(timeout=120)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_health_monitor_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['healthMonitor'] = {
'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"}
self.m.StubOutWithMock(fake_lb, 'get_health_monitor')
fake_lb.get_health_monitor().AndReturn({})
fake_lb.get_health_monitor().AndReturn(
{'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"})
self.m.StubOutWithMock(fake_lb, 'add_health_monitor')
fake_lb.add_health_monitor(
attemptsBeforeDeactivation=4, bodyRegex='.* testing .*', delay=10,
hostHeader='example.com', path='/',
statusRegex='^[234][0-9][0-9]$', timeout=10, type='HTTP')
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_health_monitor_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
hm = {'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"}
template['Resources'][lb_name]['Properties']['healthMonitor'] = hm
expected_body = copy.deepcopy(self.expected_body)
expected_body['healthMonitor'] = hm
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['healthMonitor']
self.m.StubOutWithMock(fake_lb, 'get_health_monitor')
fake_lb.get_health_monitor().AndReturn(
{'type': "HTTP", 'delay': 10, 'timeout': 10,
'attemptsBeforeDeactivation': 4, 'path': "/",
'statusRegex': "^[234][0-9][0-9]$", 'bodyRegex': ".* testing .*",
'hostHeader': "example.com"})
fake_lb.get_health_monitor().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'delete_health_monitor')
fake_lb.delete_health_monitor()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_session_persistence_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['sessionPersistence'] = 'SOURCE_IP'
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertEqual('SOURCE_IP', fake_lb.session_persistence)
self.m.VerifyAll()
def test_update_session_persistence_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'sessionPersistence'] = "SOURCE_IP"
expected_body = copy.deepcopy(self.expected_body)
expected_body['sessionPersistence'] = {'persistenceType': "SOURCE_IP"}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['sessionPersistence']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertEqual('', fake_lb.session_persistence)
self.m.VerifyAll()
def test_update_ssl_termination_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['sslTermination'] = {
'securePort': 443, 'privatekey': private_key, 'certificate': cert,
'secureTrafficOnly': False, 'intermediateCertificate': ''}
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False, 'enabled': True})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(
securePort=443, privatekey=private_key, certificate=cert,
secureTrafficOnly=False, intermediateCertificate='')
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_ssl_termination_delete(self):
template = copy.deepcopy(self.lb_template)
ssl_termination_template = {
'securePort': 443, 'privatekey': private_key, 'certificate': cert,
'intermediateCertificate': '', 'secureTrafficOnly': False}
ssl_termination_api = copy.deepcopy(ssl_termination_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties']['sslTermination'] = (
ssl_termination_template)
# The SSL termination config is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'add_ssl_termination')
fake_lb.add_ssl_termination(**ssl_termination_api)
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False, 'enabled': True})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['sslTermination']
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
self.m.StubOutWithMock(fake_lb, 'get_ssl_termination')
fake_lb.get_ssl_termination().AndReturn({
'securePort': 443, 'certificate': cert,
'secureTrafficOnly': False})
self.m.StubOutWithMock(fake_lb, 'delete_ssl_termination')
fake_lb.delete_ssl_termination()
fake_lb.get_ssl_termination().AndReturn({})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_metadata_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['metadata'] = {'a': 1, 'b': 2}
self.m.StubOutWithMock(fake_lb, 'get_metadata')
fake_lb.get_metadata().AndReturn({})
fake_lb.get_metadata().AndReturn({'a': 1, 'b': 2})
self.m.StubOutWithMock(fake_lb, 'set_metadata')
fake_lb.set_metadata({'a': 1, 'b': 2})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_metadata_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties']['metadata'] = {
'a': 1, 'b': 2}
expected_body = copy.deepcopy(self.expected_body)
expected_body['metadata'] = mox.SameElementsAs(
[{'key': 'a', 'value': 1},
{'key': 'b', 'value': 2}])
rsrc, fake_lb = self._mock_loadbalancer(
template, self.lb_name, expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['metadata']
self.m.StubOutWithMock(fake_lb, 'get_metadata')
fake_lb.get_metadata().AndReturn({'a': 1, 'b': 2})
fake_lb.get_metadata().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'delete_metadata')
fake_lb.delete_metadata()
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_errorpage_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
error_page = (
'<html><head><title>Service Unavailable</title></head><body><h2>'
'Service Unavailable</h2>The service is unavailable</body></html>')
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['errorPage'] = error_page
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': 'foo'}})
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': error_page}})
self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_errorpage_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
error_page = (
'<html><head><title>Service Unavailable</title></head><body><h2>'
'Service Unavailable</h2>The service is unavailable</body></html>')
template['Resources'][lb_name]['Properties']['errorPage'] = error_page
# The error page config is done post-creation, so no need to
# modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn({})
self.m.StubOutWithMock(fake_lb, 'set_error_page')
fake_lb.set_error_page(error_page)
fake_lb.get_error_page().AndReturn({'errorpage':
{'content': error_page}})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['errorPage']
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(
fake_lb)
self.m.StubOutWithMock(fake_lb, 'clear_error_page')
fake_lb.clear_error_page()
self.m.StubOutWithMock(fake_lb, 'get_error_page')
fake_lb.get_error_page().AndReturn(
{'errorpage': {'content': error_page}})
fake_lb.get_error_page().AndReturn({'errorpage': {'content': ""}})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_connection_logging_enable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionLogging'] = True
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertTrue(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_logging_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionLogging'] = True
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionLogging'] = {'enabled': True}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
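        # Two snapshots of the LB: the first get() still reports connection
        # logging enabled, the second reflects the post-update disabled state.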
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.connection_logging = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.connection_logging = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['connectionLogging']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertFalse(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_logging_disable(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionLogging'] = True
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionLogging'] = {'enabled': True}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionLogging'] = False
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.assertFalse(fake_lb.connection_logging)
self.m.VerifyAll()
def test_update_connection_throttle_add(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['connectionThrottle'] = {
'maxConnections': 1000}
self.m.StubOutWithMock(fake_lb, 'add_connection_throttle')
self.m.StubOutWithMock(fake_lb, 'get_connection_throttle')
fake_lb.get_connection_throttle().AndReturn(
{'maxConnectionRate': None, 'minConnections': None,
'rateInterval': None, 'maxConnections': 100})
fake_lb.add_connection_throttle(
maxConnections=1000, maxConnectionRate=None, minConnections=None,
rateInterval=None)
fake_lb.get_connection_throttle().AndReturn(
{'maxConnectionRate': None, 'minConnections': None,
'rateInterval': None, 'maxConnections': 1000})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_connection_throttle_delete(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'connectionThrottle'] = {'maxConnections': 1000}
expected_body = copy.deepcopy(self.expected_body)
expected_body['connectionThrottle'] = {
'maxConnections': 1000, 'maxConnectionRate': None,
'rateInterval': None, 'minConnections': None}
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['connectionThrottle']
self.m.StubOutWithMock(fake_lb, 'get_connection_throttle')
fake_lb.get_connection_throttle().AndReturn({
'maxConnections': 1000, 'maxConnectionRate': None,
'rateInterval': None, 'minConnections': None})
self.m.StubOutWithMock(fake_lb, 'delete_connection_throttle')
fake_lb.delete_connection_throttle()
fake_lb.get_connection_throttle().AndReturn({})
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_enable(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['contentCaching'] = 'ENABLED'
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_deleted(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'contentCaching'] = 'ENABLED'
# Enabling the content cache is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
del update_template['Properties']['contentCaching']
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_content_caching_disable(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
template['Resources'][lb_name]['Properties'][
'contentCaching'] = 'ENABLED'
# Enabling the content cache is done post-creation, so no need
# to modify self.expected_body
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['contentCaching'] = 'DISABLED'
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb1.content_caching = True
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.content_caching = False
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.VerifyAll()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndRaise(lb.NotFound('foo'))
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_immutable(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndRaise(lb.NotFound('foo'))
self.m.StubOutWithMock(fake_lb, 'delete')
fake_lb.delete().AndRaise(Exception('immutable'))
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_delete_non_immutable_exc(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb)
self.m.StubOutWithMock(fake_lb, 'delete')
fake_lb.delete().AndRaise(FakeException())
self.m.ReplayAll()
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(rsrc.delete))
self.assertIn('FakeException', six.text_type(exc))
self.m.VerifyAll()
def test_delete_states(self):
template = self._set_template(self.lb_template,
contentCaching='ENABLED')
rsrc, fake_lb = self._mock_loadbalancer(template, self.lb_name,
self.expected_body)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
self.m.UnsetStubs()
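        # Model the delete lifecycle: successive get() calls see the LB move
        # through ACTIVE -> PENDING_DELETE -> DELETED.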
fake_lb1 = copy.deepcopy(fake_lb)
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb3 = copy.deepcopy(fake_lb)
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1.status = 'ACTIVE'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
fake_lb2.status = 'PENDING_DELETE'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
fake_lb3.status = 'DELETED'
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb3)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.delete)()
self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_redir(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'HTTPS',
'port': 443,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
self.assertIsNone(mock_lb.validate())
props['protocol'] = 'HTTP'
props['sslTermination'] = {
'secureTrafficOnly': True,
'securePort': 443,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb_2",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test_2", mock_resdef, mock_stack)
self.assertIsNone(mock_lb.validate())
def test_invalid_redir_proto(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'TCP',
'port': 1234,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
def test_invalid_redir_ssl(self):
mock_stack = mock.Mock()
mock_stack.db_resource_get.return_value = None
mock_stack.has_cache_data.return_value = False
props = {'httpsRedirect': True,
'protocol': 'HTTP',
'port': 1234,
'nodes': [],
'virtualIps': [{'id': '1234'}]}
mock_resdef = rsrc_defn.ResourceDefinition("test_lb",
LoadBalancerWithFakeClient,
properties=props)
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
props['sslTermination'] = {
'secureTrafficOnly': False,
'securePort': 443,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
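        # props is mutated in place; mock_resdef appears to keep a reference
        # to it, so only mock_lb needs rebuilding for each variation.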
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
props['sslTermination'] = {
'secureTrafficOnly': True,
'securePort': 1234,
'privatekey': "bobloblaw",
'certificate': 'mycert'
}
mock_lb = lb.CloudLoadBalancer("test", mock_resdef, mock_stack)
ex = self.assertRaises(exception.StackValidationFailed,
mock_lb.validate)
self.assertIn("HTTPS redirect is only available", six.text_type(ex))
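    # The remaining update tests exercise node handling: existing nodes are
    # matched against the template and only genuinely new ones are added.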
def test_update_nodes_condition_draining(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
expected_ip = '172.168.1.4'
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "DRAINING",
"type": "PRIMARY",
"weight": 1},
{"addresses": [expected_ip],
"port": 80,
"condition": "DRAINING",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
fake_lb1.add_nodes([
fake_lb1.Node(address=expected_ip,
port=80,
condition='DRAINING',
type="PRIMARY", weight=1)])
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.nodes = [
FakeNode(address=u"166.78.103.141", port=80,
condition=u"DRAINING", type="PRIMARY", weight=1),
FakeNode(address=u"172.168.1.4", port=80,
condition=u"DRAINING", type="PRIMARY", weight=1),
]
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_nodes_add_same_address_different_port(self):
rsrc, fake_lb = self._mock_loadbalancer(self.lb_template,
self.lb_name,
self.expected_body)
fake_lb.nodes = self.expected_body['nodes']
fake_lb.tracker = "fake_lb"
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1},
{"addresses": ["166.78.103.141"],
"port": 81,
"condition": "ENABLED",
"type": "PRIMARY",
"weight": 1}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
fake_lb1.add_nodes([
fake_lb1.Node(address="166.78.103.141",
port=81,
condition='ENABLED',
type="PRIMARY", weight=1)])
fake_lb1.tracker = "fake_lb1"
fake_lb2 = copy.deepcopy(fake_lb)
fake_lb2.nodes = [
FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED", type="PRIMARY", weight=1),
FakeNode(address=u"166.78.103.141", port=81,
condition=u"ENABLED", type="PRIMARY", weight=1),
]
fake_lb2.tracker = "fake_lb2"
rsrc.clb.get(mox.IgnoreArg()).AndReturn(fake_lb2)
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
def test_update_nodes_defaults(self):
template = copy.deepcopy(self.lb_template)
lb_name = list(six.iterkeys(template['Resources']))[0]
tmpl_node = template['Resources'][lb_name]['Properties']['nodes'][0]
tmpl_node['type'] = "PRIMARY"
tmpl_node['condition'] = "ENABLED"
tmpl_node['weight'] = 1
expected_body = copy.deepcopy(self.expected_body)
expected_body['nodes'] = [FakeNode(address=u"166.78.103.141", port=80,
condition=u"ENABLED",
type="PRIMARY", weight=1)]
rsrc, fake_lb = self._mock_loadbalancer(template,
self.lb_name,
expected_body)
fake_lb.nodes = self.expected_body['nodes']
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.create)()
update_template = copy.deepcopy(rsrc.t)
update_template['Properties']['nodes'] = [
{"addresses": ["166.78.103.141"],
"port": 80}]
self.m.UnsetStubs()
self.m.StubOutWithMock(rsrc.clb, 'get')
fake_lb1 = copy.deepcopy(fake_lb)
rsrc.clb.get(mox.IgnoreArg()).MultipleTimes().AndReturn(fake_lb1)
self.m.StubOutWithMock(fake_lb1, 'add_nodes')
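        # No add_nodes() call is recorded: once the default type, condition
        # and weight are applied, the updated node matches the existing one.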
self.m.ReplayAll()
scheduler.TaskRunner(rsrc.update, update_template)()
self.assertEqual((rsrc.UPDATE, rsrc.COMPLETE), rsrc.state)
self.m.VerifyAll()
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Add tests for the badge server image end-point."""
import datetime
import unittest
import unittest.mock
import urllib.parse
from compatibility_lib import compatibility_store
from compatibility_lib import dependency_highlighter
from compatibility_lib import deprecated_dep_finder_stub
from compatibility_lib import fake_compatibility_store
from compatibility_lib import package
import main
import utils
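# Canned CompatibilityResult fixtures for the fake store. The naming
# convention appears to be <PACKAGE>[_GIT]_RECENT_<STATUS>_<PY MAJOR VERSION>,
# where the GIT variants point at the whitelisted github mirrors of the same
# packages.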
APACHE_BEAM_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'apache-beam[gcp]': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '2.12.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '2.12.0',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'apache-beam[gcp]': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '2.12.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '2.12.0',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_INSTALL_FAILURE_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_INSTALL_FAILURE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/api-core.git')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
dependency_info={
'google-api-core': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'tensorflow': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.1',
'installed_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
'is_latest': True,
'latest_version': '1.3.1',
'latest_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
TENSORFLOW_GIT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/tensorflow.git')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
dependency_info={
'tensorflow': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.1',
'installed_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
'is_latest': True,
'latest_version': '1.3.1',
'latest_version_time': datetime.datetime(
2019, 4, 26, 0, 0, 0),
},
},
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_RECENT_INSTALL_ERROR_3 = compatibility_store.CompatibilityResult(
[package.Package('apache-beam[gcp]'),
package.Package('google-api-core')],
python_major_version=3, # apache-beam does not support Python 3
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_INSTALL_ERROR_3 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-core.git')
],
python_major_version=3, # apache-beam does not support Python 3
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('apache-beam[gcp]'),
package.Package('git+git://github.com/google/api-python-client.git')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-core'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=2,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('google-api-python-client')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_INSTALL_ERROR_2 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=2, # tensorflow does not support Python 2
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[package.Package('google-api-core'),
package.Package('tensorflow')],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_INSTALL_ERROR_2 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=2, # tensorflow does not support Python 2
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3 = compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/api-core.git'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.CHECK_WARNING,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3 = compatibility_store.CompatibilityResult(
[
package.Package('google-api-python-client'),
package.Package('tensorflow')
],
python_major_version=3,
status=compatibility_store.Status.SUCCESS,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0))
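# A fully consistent dataset: every package (PyPI and git) is self-compatible
# and pair-compatible on every Python version it supports.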
RECENT_SUCCESS_DATA = [
APACHE_BEAM_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3,
TENSORFLOW_RECENT_SUCCESS_3,
TENSORFLOW_GIT_RECENT_SUCCESS_3,
APACHE_BEAM_GOOGLE_API_CORE_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_SUCCESS_3,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_SUCCESS_3,
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3,
]
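# The same dataset, except google-api-core (PyPI and git) is flagged
# CHECK_WARNING against itself and in every pair that contains it.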
GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA = [
APACHE_BEAM_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_2,
GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_2,
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_3,
TENSORFLOW_RECENT_SUCCESS_3,
TENSORFLOW_GIT_RECENT_SUCCESS_3,
APACHE_BEAM_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GIT_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_PAIR_INCOMPATIBLE_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_GIT_RECENT_SUCCESS_2,
APACHE_BEAM_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
APACHE_BEAM_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2,
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_2,
GOOGLE_API_CORE_GIT_GOOGLE_API_PYTHON_CLIENT_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3,
GOOGLE_API_PYTHON_CLIENT_TENSORFLOW_RECENT_SUCCESS_3,
]
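# Dependency metadata in which every installed version is also the latest
# release, so the dependency highlighter should have nothing to report.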
UP_TO_DATE_DEPS = {
'google-auth': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.6.3',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': True,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
},
'grpcio': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.19.0',
'installed_version_time': datetime.datetime(
2019, 2, 27, 0, 0, 53),
'is_latest': True,
'latest_version': '1.19.0',
'latest_version_time': datetime.datetime(
2019, 2, 27, 0, 0, 53)
},
'requests': {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.9.0',
'installed_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48),
'is_latest': True,
'latest_version': '1.9.0',
'latest_version_time': datetime.datetime(
2019, 4, 5, 18, 1, 48)
},
}
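# Patches the store, highlighter, deprecated-dependency finder, package
# whitelist and github URL whitelist so every request runs against in-memory
# fakes.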
class BadgeTestCase(unittest.TestCase):
"""Base class for tests of badge images."""
def setUp(self):
self.fake_store = fake_compatibility_store.CompatibilityStore()
self.dependency_highlighter_stub = dependency_highlighter.DependencyHighlighter(
store=self.fake_store)
        self.deprecated_dep_finder_stub = (
            deprecated_dep_finder_stub.DeprecatedDepFinderStub())
main.app.config['TESTING'] = True
self.client = main.app.test_client()
self._store_patch = unittest.mock.patch('utils.store', self.fake_store)
self._highlighter_patch = unittest.mock.patch(
'utils.highlighter', self.dependency_highlighter_stub)
self._finder_patch = unittest.mock.patch(
'utils.finder', self.deprecated_dep_finder_stub)
self._pkg_list_patch = unittest.mock.patch(
'compatibility_lib.configs.PKG_LIST', [
'apache-beam[gcp]',
'google-api-core',
'google-api-python-client',
'tensorflow',
])
self._whitelist_urls_patch = unittest.mock.patch(
'compatibility_lib.configs.WHITELIST_URLS', {
'git+git://github.com/google/apache-beam.git':
'apache-beam[gcp]',
'git+git://github.com/google/api-core.git': 'google-api-core',
'git+git://github.com/google/api-python-client.git':
'google-api-python-client',
'git+git://github.com/google/tensorflow.git': 'tensorflow',
})
self._store_patch.start()
self.addCleanup(self._store_patch.stop)
self._highlighter_patch.start()
self.addCleanup(self._highlighter_patch.stop)
self._finder_patch.start()
self.addCleanup(self._finder_patch.stop)
self._pkg_list_patch.start()
self.addCleanup(self._pkg_list_patch.stop)
self._whitelist_urls_patch.start()
self.addCleanup(self._whitelist_urls_patch.stop)
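    # Thin wrappers around the two endpoints under test; both return the
    # decoded JSON payload rather than the raw response object.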
def get_image_json(self, package):
"""Return the calculated badge data for a package as a dict."""
return self.client.get(
'/one_badge_image', query_string={
'package': package
}).get_json()
def get_target_json(self, package):
"""Return the calculated details page data for a package as a dict."""
return self.client.get(
'/one_badge_target', query_string={
'package': package
}).get_json()
def assertLinkUrl(self, package, actual_url):
"""Assert that the link for the badge image is correct for a package."""
parsed_url = urllib.parse.urlparse(actual_url)
params = urllib.parse.parse_qs(parsed_url.query)
self.assertEqual([package], params['package'])
def _assertImageResponse(
self, package_name, expected_status, expected_left_text):
"""Assert that the badge image response is correct for a package."""
json_response = self.get_image_json(package_name)
self.assertEqual(json_response['left_text'], expected_left_text)
self.assertEqual(json_response['right_text'], expected_status.value)
self.assertEqual(json_response['right_color'],
main.BADGE_STATUS_TO_COLOR.get(expected_status))
self.assertLinkUrl(package_name, json_response['whole_link'])
def _assertImageResponsePyPI(self, package_name, expected_status):
"""Assert that the badge image response is correct for a PyPI package."""
self._assertImageResponse(
package_name, expected_status, 'compatibility check (PyPI)')
def _assertImageResponseGithub(self, package_name, expected_status):
"""Assert that the badge image response is correct for a github package."""
self._assertImageResponse(
package_name, expected_status, 'compatibility check (master)')
def assertBadgeStatusToColor(self, badge_status_to_color):
"""Assert that the given badge status to color mapping is correct."""
for status, color in badge_status_to_color.items():
badge_status = main.BadgeStatus(status)
self.assertEqual(main.BADGE_STATUS_TO_COLOR[badge_status], color)
class TestSuccess(BadgeTestCase):
"""Tests for the cases where the badge image displays 'success.'"""
def setUp(self):
BadgeTestCase.setUp(self)
self.success_data = RECENT_SUCCESS_DATA
        # The CompatibilityResults in pairs_without_common_versions and
        # github_pairs deliberately carry error statuses, yet the badge should
        # still read 'success' because such pairs are expected to be skipped.
self.pairs_without_common_versions = [
APACHE_BEAM_GOOGLE_API_CORE_RECENT_INSTALL_ERROR_3,
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_INSTALL_ERROR_3,
GOOGLE_API_CORE_TENSORFLOW_RECENT_INSTALL_ERROR_2,
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_INSTALL_ERROR_2,
]
self.github_pairs = [
compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/apache-beam.git'),
package.Package('google-api-core')
],
python_major_version=2,
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0)),
compatibility_store.CompatibilityResult(
[
package.Package('git+git://github.com/google/tensorflow.git'),
package.Package('google-api-core')
],
python_major_version=3,
status=compatibility_store.Status.INSTALL_ERROR,
timestamp=datetime.datetime(2019, 5, 7, 0, 0, 0)),
]
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.SUCCESS)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.SUCCESS)
def assertTargetResponse(self, package_name, *supported_pyversions):
expected_status = main.BadgeStatus.SUCCESS
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
for pyversion in ['py2', 'py3']:
expected_details = utils.EMPTY_DETAILS
if pyversion not in supported_pyversions:
expected_details = ('The package does not support this '
'version of python.')
self.assertEqual(
json_response['self_compat_res'][pyversion],
{'details': expected_details, 'status': expected_status})
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_pypi_py2py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_pypi_py2_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'apache-beam[gcp]'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2')
def test_git_py2_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/apache-beam.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2')
def test_pypi_py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'tensorflow'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py3')
def test_git_py3_fresh_nodeps(self):
self.fake_store.save_compatibility_statuses(self.success_data)
package_name = 'git+git://github.com/google/tensorflow.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py3')
def test_pypi_py2py3_fresh_nodeps_ignore_pairs_without_common_versions(
self):
"""Tests that pairs not sharing a common version are ignored."""
fake_results = self.success_data + self.pairs_without_common_versions
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps_ignore_pairs_without_common_versions(
self):
"""Tests that pairs not sharing a common version are ignored."""
fake_results = self.success_data + self.pairs_without_common_versions
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_pypi_py2py3_fresh_nodeps_ignore_git(self):
"""Tests that pair results containing git packages are ignored."""
fake_results = self.success_data + self.github_pairs
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
def test_git_py2py3_fresh_nodeps_ignore_git(self):
"""Tests that pair results containing git packages are ignored."""
fake_results = self.success_data + self.github_pairs
self.fake_store.save_compatibility_statuses(fake_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name, 'py2', 'py3')
class TestUnknownPackage(BadgeTestCase):
"""Tests for the cases where the badge image displays 'unknown package.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.UNKNOWN_PACKAGE)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.UNKNOWN_PACKAGE)
def assertTargetResponse(self, package_name):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.UNKNOWN_PACKAGE
expected_details = ('This package is not a whitelisted google '
'python package; to whitelist a package, '
'contact the python team.')
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
expected_result = {'status': expected_status, 'details': {}}
self.assertEqual(json_response['dependency_res'], expected_result)
def test_pypi_unknown_package(self):
self.fake_store.save_compatibility_statuses(RECENT_SUCCESS_DATA)
package_name = 'xxx'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(package_name)
def test_github_unknown_package(self):
self.fake_store.save_compatibility_statuses(RECENT_SUCCESS_DATA)
package_name = 'https://github.com/brianquinlan/notebooks'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(package_name)
class TestMissingData(BadgeTestCase):
"""Tests for the cases where the badge image displays 'missing data.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.MISSING_DATA)
def test_missing_self_compatibility_data(self):
package_name = 'google-api-core'
missing_self_data = list(RECENT_SUCCESS_DATA)
missing_self_data.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
self.fake_store.save_compatibility_statuses(missing_self_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.MISSING_DATA
expected_details = ("Missing data for packages=['google-api-core'], "
"versions=[2]")
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_missing_pair_compatibility_data(self):
package_name = 'google-api-core'
missing_self_data = list(RECENT_SUCCESS_DATA)
missing_self_data.remove(
GOOGLE_API_CORE_GOOGLE_API_PYTHON_CLIENT_RECENT_SUCCESS_2)
self.fake_store.save_compatibility_statuses(missing_self_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
expected_status = main.BadgeStatus.MISSING_DATA
expected_details = {
'google-api-python-client': (
"Missing data for packages=['google-api-core', "
"'google-api-python-client'], versions=[2]")
}
expected_result = {
'py2': {'status': expected_status, 'details': expected_details},
'py3': {'status': expected_status, 'details': expected_details}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': {}, 'status': expected_status})
class TestSelfIncompatible(BadgeTestCase):
"""Tests for the cases where the badge image displays 'self incompatible.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.SELF_INCOMPATIBLE)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.SELF_INCOMPATIBLE)
def assertTargetResponse(self, package_name, expected_pair_result):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.SELF_INCOMPATIBLE
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_status = main.BadgeStatus.SUCCESS
self.assertEqual(
json_response['google_compat_res'],
expected_pair_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_pypi_py2py3_incompatible_fresh_nodeps(self):
package_name = 'google-api-core'
self.fake_store.save_compatibility_statuses(
GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {'tensorflow': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_pypi_py2py3_py2_installation_failure_fresh_nodeps(self):
package_name = 'google-api-core'
self_incompatible_data = list(GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA)
self_incompatible_data.remove(GOOGLE_API_CORE_RECENT_SELF_INCOMPATIBLE_2)
self_incompatible_data.append(GOOGLE_API_CORE_RECENT_INSTALL_FAILURE_2)
self.fake_store.save_compatibility_statuses(self_incompatible_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {'tensorflow': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_github_py2py3_incompatible_fresh_nodeps(self):
package_name = 'git+git://github.com/google/api-core.git'
self.fake_store.save_compatibility_statuses(
GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA)
# Test badge image
self.assertImageResponseGithub(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {'tensorflow': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_github_py2py3_py2_installation_failure_fresh_nodeps(self):
package_name = 'git+git://github.com/google/api-core.git'
self_incompatible_data = list(GOOGLE_API_CORE_SELF_INCOMPATIBLE_DATA)
self_incompatible_data.remove(
GOOGLE_API_CORE_GIT_RECENT_SELF_INCOMPATIBLE_2)
self_incompatible_data.append(
GOOGLE_API_CORE_GIT_RECENT_INSTALL_FAILURE_2)
self.fake_store.save_compatibility_statuses(self_incompatible_data)
# Test badge image
self.assertImageResponseGithub(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {'tensorflow': utils.EMPTY_DETAILS},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
class TestPairIncompatibility(BadgeTestCase):
"""Test for cases where the badge image displays 'pair incompatible.'"""
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.PAIR_INCOMPATIBLE)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.PAIR_INCOMPATIBLE)
def assertTargetResponse(self, package_name, expected_pair_result):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
self.assertEqual(
json_response['google_compat_res'],
expected_pair_result)
# dependency result check
self.assertEqual(
json_response['dependency_res'],
            {'deprecated_deps': '', 'details': {}, 'status': expected_status})
def test_pypi_py2py3_py2_incompatible_fresh_nodeps(self):
package_name = 'google-api-core'
pair_incompatible_data = list(RECENT_SUCCESS_DATA)
pair_incompatible_data.remove(
APACHE_BEAM_GOOGLE_API_CORE_RECENT_SUCCESS_2)
pair_incompatible_data.append(
APACHE_BEAM_GOOGLE_API_CORE_RECENT_PAIR_INCOMPATIBLE_2)
self.fake_store.save_compatibility_statuses(pair_incompatible_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': 'NO DETAILS'},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {}, 'status': main.BadgeStatus.SUCCESS}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_github_py2py3_py2_incompatible_fresh_nodeps(self):
package_name = 'git+git://github.com/google/api-core.git'
pair_incompatible_data = list(RECENT_SUCCESS_DATA)
pair_incompatible_data.remove(
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
pair_incompatible_data.append(
APACHE_BEAM_GOOGLE_API_CORE_GIT_RECENT_PAIR_INCOMPATIBLE_2)
self.fake_store.save_compatibility_statuses(pair_incompatible_data)
# Test badge image
self.assertImageResponseGithub(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {'apache-beam[gcp]': 'NO DETAILS'},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE},
'py3': {'details': {}, 'status': main.BadgeStatus.SUCCESS}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_pypi_py2py3_py3_incompatible_fresh_nodeps(self):
package_name = 'google-api-core'
pair_incompatible_data = list(RECENT_SUCCESS_DATA)
pair_incompatible_data.remove(
GOOGLE_API_CORE_TENSORFLOW_RECENT_SUCCESS_3)
pair_incompatible_data.append(
GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3)
self.fake_store.save_compatibility_statuses(pair_incompatible_data)
# Test badge image
self.assertImageResponsePyPI(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {}, 'status': main.BadgeStatus.SUCCESS},
'py3': {'details': {'tensorflow': 'NO DETAILS'},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
def test_github_py2py3_py3_incompatible_fresh_nodeps(self):
package_name = 'git+git://github.com/google/api-core.git'
pair_incompatible_data = list(RECENT_SUCCESS_DATA)
pair_incompatible_data.remove(
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_SUCCESS_3)
pair_incompatible_data.append(
GOOGLE_API_CORE_GIT_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3)
self.fake_store.save_compatibility_statuses(pair_incompatible_data)
# Test badge image
self.assertImageResponseGithub(package_name)
# Test badge details page
expected_pair_result = {
'py2': {'details': {}, 'status': main.BadgeStatus.SUCCESS},
'py3': {'details': {'tensorflow': 'NO DETAILS'},
'status': main.BadgeStatus.PAIR_INCOMPATIBLE}
}
self.assertTargetResponse(package_name, expected_pair_result)
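# Naming convention, as inferred from how the fixtures are used in these
# tests: constants suffixed _2 hold Python 2 results and those suffixed _3
# hold Python 3 results, e.g. swapping in
# GOOGLE_API_CORE_TENSORFLOW_RECENT_PAIR_INCOMPATIBLE_3 above produces the
# expected 'py3' pair failure.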
class TestBadgeImageDependency(TestSuccess):
"""Tests for cases with multiple dependencies displaying 'success'."""
def setUp(self):
TestSuccess.setUp(self)
# Dependency Info
dep_info = dict(UP_TO_DATE_DEPS)
# Success Data: add up-to-date dependency information for all
# CompatibilityResults containing a single package.
self.success_data = []
for compat_result in RECENT_SUCCESS_DATA:
if len(compat_result.packages) == 1:
compat_result = compat_result.with_updated_dependency_info(
dep_info)
self.success_data.append(compat_result)
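# The dependency-status expectations in the next two classes imply the
# following thresholds (inferred from the expected details in this file, not
# from a published spec): lagging by a patch or a minor version or two is
# reported as LOW_PRIORITY ('old dependency'), while being one or more major
# versions or three or more minor versions behind, or exceeding the grace
# period (30 days after a new major release, 6 months otherwise), escalates
# to HIGH_PRIORITY ('obsolete dependency').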
class TestOutdatedDependency(BadgeTestCase):
"""Tests for cases where the badge image displays 'old dependency.'"""
def setUp(self):
BadgeTestCase.setUp(self)
self.off_by_minor_expected_details = {
'google-auth': {
'detail': 'google-auth is not up to date with the latest version',
'installed_version': '1.4.0',
'latest_version': '1.6.3',
'priority': 'LOW_PRIORITY'
}
}
self.off_by_patch_expected_details = {
'google-auth': {
'detail': 'google-auth is not up to date with the latest version',
'installed_version': '1.6.0',
'latest_version': '1.6.3',
'priority': 'LOW_PRIORITY'
}
}
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.OUTDATED_DEPENDENCY)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.OUTDATED_DEPENDENCY)
def assertTargetResponse(self, package_name, expected_details):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
expected_status = main.BadgeStatus.OUTDATED_DEPENDENCY
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': expected_details, 'status': expected_status})
def test_pypi_py2py3_off_by_minor(self):
old_dep_info = dict(UP_TO_DATE_DEPS)
old_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.4.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
old_dep_compat_results = list(RECENT_SUCCESS_DATA)
old_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
old_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
old_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
old_dep_info))
old_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
old_dep_info))
self.fake_store.save_compatibility_statuses(old_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.off_by_minor_expected_details)
def test_git_py2py3_off_by_minor(self):
old_dep_info = dict(UP_TO_DATE_DEPS)
old_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.4.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
old_dep_compat_results = list(RECENT_SUCCESS_DATA)
old_dep_compat_results.remove(GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
old_dep_compat_results.remove(GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
old_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
old_dep_info))
old_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
old_dep_info))
self.fake_store.save_compatibility_statuses(old_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.off_by_minor_expected_details)
def test_pypi_py2py3_off_by_patch(self):
old_dep_info = dict(UP_TO_DATE_DEPS)
old_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.6.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
old_dep_compat_results = list(RECENT_SUCCESS_DATA)
old_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
old_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
old_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
old_dep_info))
old_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
old_dep_info))
self.fake_store.save_compatibility_statuses(old_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.off_by_patch_expected_details)
def test_git_py2py3_off_by_patch(self):
old_dep_info = dict(UP_TO_DATE_DEPS)
old_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.6.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
old_dep_compat_results = list(RECENT_SUCCESS_DATA)
old_dep_compat_results.remove(GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
old_dep_compat_results.remove(GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
old_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
old_dep_info))
old_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
old_dep_info))
self.fake_store.save_compatibility_statuses(old_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.off_by_patch_expected_details)
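# NOTE: the dependency tests above and below all rebuild their fixtures the
# same way: copy RECENT_SUCCESS_DATA, then swap selected results for copies
# carrying modified dependency info. A hypothetical helper like the sketch
# below could factor that pattern out (illustrative only; the tests do not
# call it):
def _with_dependency_info(results_to_replace, dep_info):
    """Return RECENT_SUCCESS_DATA with the given results swapped for copies
    carrying dep_info via with_updated_dependency_info()."""
    results = [r for r in RECENT_SUCCESS_DATA if r not in results_to_replace]
    results.extend(
        r.with_updated_dependency_info(dep_info) for r in results_to_replace)
    return results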
class TestObsoleteDependency(BadgeTestCase):
"""Tests for cases where the badge image displays 'obsolete dependency'."""
def setUp(self):
BadgeTestCase.setUp(self)
self.off_by_major_expected_details = {
'google-auth': {
'detail': ('google-auth is 1 or more major versions behind '
'the latest version'),
'installed_version': '0.9.9',
'latest_version': '1.6.3',
'priority': 'HIGH_PRIORITY'
}
}
self.off_by_minor_expected_details = {
'google-auth': {
'detail': ('google-auth is 3 or more minor versions behind '
'the latest version'),
'installed_version': '1.3.0',
'latest_version': '1.6.3',
'priority': 'HIGH_PRIORITY'
}
}
self.expired_major_grace_period_expected_details = {
'google-auth': {
'detail': ('it has been over 30 days since the major version '
'for google-auth was released'),
'installed_version': '0.9.9',
'latest_version': '1.0.0',
'priority': 'HIGH_PRIORITY'
}
}
self.expired_default_grace_period_expected_details = {
'google-auth': {
'detail': ('it has been over 6 months since the latest '
'version for google-auth was released'),
'installed_version': '1.3.0',
'latest_version': '1.0.0',
'priority': 'HIGH_PRIORITY'
}
}
def assertImageResponsePyPI(self, package_name):
"""Assert that the badge image response is correct for a PyPI package."""
BadgeTestCase._assertImageResponsePyPI(
self, package_name, main.BadgeStatus.OBSOLETE_DEPENDENCY)
def assertImageResponseGithub(self, package_name):
"""Assert that the badge image response is correct for a github package."""
BadgeTestCase._assertImageResponseGithub(
self, package_name, main.BadgeStatus.OBSOLETE_DEPENDENCY)
def assertTargetResponse(self, package_name, expected_details):
json_response = self.get_target_json(package_name)
self.assertEqual(json_response['package_name'], package_name)
self.assertBadgeStatusToColor(json_response['badge_status_to_color'])
# self compatibility result check
expected_status = main.BadgeStatus.SUCCESS
expected_result = {
'py2': {'status': expected_status, 'details': utils.EMPTY_DETAILS},
'py3': {'status': expected_status, 'details': utils.EMPTY_DETAILS}
}
self.assertEqual(json_response['self_compat_res'], expected_result)
# pair compatibility result check
expected_result = {
'py2': {'status': expected_status, 'details': {}},
'py3': {'status': expected_status, 'details': {}}
}
self.assertEqual(json_response['google_compat_res'], expected_result)
# dependency result check
expected_status = main.BadgeStatus.OBSOLETE_DEPENDENCY
self.assertEqual(
json_response['dependency_res'],
{'deprecated_deps': '', 'details': expected_details, 'status': expected_status})
def test_pypi_py2py3_off_by_major(self):
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '0.9.9',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.off_by_major_expected_details)
def test_git_py2py3_off_by_major(self):
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '0.9.9',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.off_by_major_expected_details)
def test_pypi_py2py3_off_by_minor(self):
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.off_by_minor_expected_details)
def test_git_py2py3_off_by_minor(self):
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 5, 7, 0, 0, 0),
'installed_version': '1.3.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.6.3',
'latest_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.off_by_minor_expected_details)
def test_pypi_py2py3_expired_major_grace_period(self):
"""Tests that "old dependency" eventually changes to "obsolete ..."."""
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 3, 23, 0, 0, 0),
'installed_version': '0.9.9',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.0.0',
'latest_version_time': datetime.datetime(2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.expired_major_grace_period_expected_details)
def test_git_py2py3_expired_major_grace_period(self):
"""Tests that "old dependency" eventually changes to "obsolete ..."."""
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 3, 23, 0, 0, 0),
'installed_version': '0.9.9',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.0.0',
'latest_version_time': datetime.datetime(2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.expired_major_grace_period_expected_details)
def test_pypi_py2py3_expired_default_grace_period(self):
"""Tests that "old dependency" eventually changes to "obsolete ..."."""
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 8, 23, 0, 0, 0),
'installed_version': '1.3.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.0.0',
'latest_version_time': datetime.datetime(2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(GOOGLE_API_CORE_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'google-api-core'
self.assertImageResponsePyPI(package_name)
self.assertTargetResponse(
package_name, self.expired_default_grace_period_expected_details)
def test_git_py2py3_expired_default_grace_period(self):
"""Tests that "old dependency" eventually changes to "obsolete ..."."""
obsolete_dep_info = dict(UP_TO_DATE_DEPS)
obsolete_dep_info['google-auth'] = {
'current_time': datetime.datetime(2019, 8, 23, 0, 0, 0),
'installed_version': '1.3.0',
'installed_version_time': datetime.datetime(
2019, 2, 19, 21, 15, 56),
'is_latest': False,
'latest_version': '1.0.0',
'latest_version_time': datetime.datetime(2019, 2, 19, 21, 15, 56)
}
obsolete_dep_compat_results = list(RECENT_SUCCESS_DATA)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2)
obsolete_dep_compat_results.remove(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3)
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_2.with_updated_dependency_info(
obsolete_dep_info))
obsolete_dep_compat_results.append(
GOOGLE_API_CORE_GIT_RECENT_SUCCESS_3.with_updated_dependency_info(
obsolete_dep_info))
self.fake_store.save_compatibility_statuses(
obsolete_dep_compat_results)
package_name = 'git+git://github.com/google/api-core.git'
self.assertImageResponseGithub(package_name)
self.assertTargetResponse(
package_name, self.expired_default_grace_period_expected_details)
# -*- coding: utf-8 -*-
# @Time : 2018/4/9 14:53:17
# @Author : SilverMaple
# @Site : https://github.com/SilverMaple
# @File : routes.py
import hashlib
import os
import logging
import sys
import shutil
import json
import subprocess
import time
from datetime import datetime
from app.decorators import async
from flask import render_template, flash, redirect, url_for, request, g, \
jsonify, current_app, session, make_response
from flask_login import current_user, login_required
from flask_babel import _, get_locale
from flask_uploads import UploadSet
from guess_language import guess_language
from app import db
from app.main.forms import EditProfileForm, PostForm, SearchForm, AddAppForm, AddAppExtensionForm, EditAppExtensionForm, \
AddAppAdminForm, AddTenantForm, AddTenantDatabaseForm, EditTenantDatabaseForm, AddAppCodeForm, AddRoleForm, AddUserForm
from app.models import User, Post, App, AppAdmin, AppExpand, AdminToApp, Tenant, TenantDb, AppCode, SaasRole, SaasUser
from app.translate import translate
from app.main import bp
from app.email import follower_notification
from app.auth import LoginType, current_login_type
from app import auth
from pip._internal import commands
from requests import Response
from werkzeug.datastructures import FileStorage
from werkzeug.test import EnvironBuilder
from werkzeug.utils import secure_filename
from werkzeug.security import generate_password_hash
logger = logging.getLogger("MirrorConstruct")
formatter = logging.Formatter('[%(asctime)s] %(message)s')
blank_formatter = logging.Formatter('')
# formatter = logging.Formatter('[%(asctime)s][%(levelname)s] ## %(message)s')
file_handler = logging.FileHandler("logs/mirror_construct.log")
file_handler.setFormatter(formatter)  # setFormatter selects the output format
# attach the file handler to the logger
logger.addHandler(file_handler)
logger.setLevel(logging.DEBUG)
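# blank_formatter is not used above; presumably it is swapped onto
# file_handler temporarily to emit separator lines, along the lines of:
#   file_handler.setFormatter(blank_formatter)
#   logger.info('')
#   file_handler.setFormatter(formatter)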
@bp.before_app_request
def before_request():
if current_user.is_authenticated:
current_user.last_seen = datetime.utcnow()
db.session.commit()
g.search_form = SearchForm()
g.locale = str(get_locale())
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
@login_required
def index():
form = PostForm()
if form.validate_on_submit():
language = guess_language(form.post.data)
if language == 'UNKNOWN' or len(language) > 5:
language = ''
post = Post(body=form.post.data, author=current_user,
language=language)
db.session.add(post)
db.session.commit()
flash(_('Your post is now live!'))
return redirect(url_for('main.index'))
page = request.args.get('page', 1, type=int)
posts = current_user.followed_posts().paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
    next_url = url_for('main.index', page=posts.next_num) \
        if posts.has_next else None
    prev_url = url_for('main.index', page=posts.prev_num) \
        if posts.has_prev else None
return render_template('index.html', title=_('Home'), form=form,
posts=posts.items, next_url=next_url,
prev_url=prev_url)
@bp.route('/index_registe')
def index_registe():
if current_user.is_authenticated and auth.current_login_type == LoginType.REGISTE_MANAGE:
        return render_template('index_registe_manage.html', title=_('Register Manage'))
else:
auth.current_login_type = LoginType.REGISTE_MANAGE
return redirect(url_for('auth.login'))
@bp.route('/index_app')
def index_app():
if current_user.is_authenticated and auth.current_login_type == LoginType.WEB_APP_MANAGE:
        app_name_list = get_app_name_list()
        current_selected_app_name = get_current_selected_app_name()
return render_template('index_app_manage.html', title=_('Web App Manage'), app_name_list=app_name_list,
current_selected_app_name=current_selected_app_name)
else:
auth.current_login_type = LoginType.WEB_APP_MANAGE
return redirect(url_for('auth.login'))
@bp.route('/index_tenant')
def index_tenant():
if current_user.is_authenticated and auth.current_login_type == LoginType.TENANT_SERVICE:
return render_template('index_tenant_service.html', title=_('Tenant Service'))
else:
auth.current_login_type = LoginType.TENANT_SERVICE
return redirect(url_for('auth.login'))
@bp.route('/explore')
@login_required
def explore():
page = request.args.get('page', 1, type=int)
posts = Post.query.order_by(Post.timestamp.desc()).paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.explore', page=posts.next_num) \
if posts.has_next else None
prev_url = url_for('main.explore', page=posts.prev_num) \
if posts.has_prev else None
return render_template('index.html', title=_('Explore'),
posts=posts.items, next_url=next_url,
prev_url=prev_url)
@bp.route('/user/<username>')
@login_required
def user(username):
user = User.query.filter_by(username=username).first_or_404()
page = request.args.get('page', 1, type=int)
posts = user.posts.order_by(Post.timestamp.desc()).paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.user', username=user.username,
page=posts.next_num) if posts.has_next else None
prev_url = url_for('main.user', username=user.username,
page=posts.prev_num) if posts.has_prev else None
return render_template('user.html', user=user, posts=posts.items,
next_url=next_url, prev_url=prev_url)
@bp.route('/edit_profile', methods=['GET', 'POST'])
@login_required
def edit_profile():
form = EditProfileForm(current_user.username)
if form.validate_on_submit():
current_user.username = form.username.data
current_user.about_me = form.about_me.data
db.session.commit()
flash(_('Your changes have been saved.'))
return redirect(url_for('main.edit_profile'))
elif request.method == 'GET':
form.username.data = current_user.username
form.about_me.data = current_user.about_me
return render_template('edit_profile.html', title=_('Edit Profile'),
form=form)
@bp.route('/follow/<username>')
@login_required
def follow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash(_('User %(username)s not found.', username=username))
return redirect(url_for('main.index'))
if user == current_user:
flash(_('You cannot follow yourself!'))
return redirect(url_for('main.user', username=username))
current_user.follow(user)
db.session.commit()
flash(_('You are following %(username)s!', username=username))
follower_notification(user, current_user)
return redirect(url_for('main.user', username=username))
@bp.route('/unfollow/<username>')
@login_required
def unfollow(username):
user = User.query.filter_by(username=username).first()
if user is None:
flash(_('User %(username)s not found.', username=username))
return redirect(url_for('main.index'))
if user == current_user:
flash(_('You cannot unfollow yourself!'))
return redirect(url_for('main.user', username=username))
current_user.unfollow(user)
db.session.commit()
flash(_('You are not following %(username)s.', username=username))
return redirect(url_for('main.user', username=username))
@bp.route('/translate', methods=['POST'])
@login_required
def translate_text():
return jsonify({'text': translate(request.form['text'],
request.form['source_language'],
request.form['dest_language'])})
@bp.route('/search')
@login_required
def search():
if not g.search_form.validate():
return redirect(url_for('main.explore'))
page = request.args.get('page', 1, type=int)
posts, total = Post.search(g.search_form.q.data, page,
current_app.config['POSTS_PER_PAGE'])
next_url = url_for('main.search', q=g.search_form.q.data, page=page + 1) \
if total > page * current_app.config['POSTS_PER_PAGE'] else None
prev_url = url_for('main.search', q=g.search_form.q.data, page=page - 1) \
if page > 1 else None
return render_template('search.html', title=_('Search'), posts=posts,
next_url=next_url, prev_url=prev_url)
# ---------------------------------------------------------------------------------------
# registe manage app setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_setting')
@login_required
def registe_manage_app_setting():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('App List'), AppAdmin=AppAdmin,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_add():
form = AddAppForm(None)
if form.validate_on_submit():
app_id = hashlib.md5(form.app_name.data.encode(encoding='UTF-8')).hexdigest()
db.session.add(App(id=None, name=form.app_name.data, appid=app_id))
db.session.commit()
        flash(_('New app has been added.'))
return redirect(url_for('main.registe_manage_app_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('Add New App'), AppAdmin=AppAdmin, form=form,
addTitle=('Add New App'))
@bp.route('/registe_manage_app_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = App.query.filter(App.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('App List'), AppAdmin=AppAdmin,
isCheck=isCheck, isEdit=isEdit, session=session,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_delete_select():
    flash(_('Batch delete operations are currently not allowed.'))
return redirect(url_for('main.registe_manage_app_setting'))
@bp.route('/registe_manage_app_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_setting_edit(id):
if session.get('validate_app_name'):
form = AddAppForm(session['validate_app_name'])
else:
form = AddAppForm(None)
if form.validate_on_submit():
current_data = App.query.filter(App.id == id).first()
current_data.name = form.app_name.data
db.session.commit()
        flash(_('App has been edited.'))
return redirect(url_for('main.registe_manage_app_setting'))
elif request.method == 'GET':
current_data = App.query.filter(App.id == id).first()
form.app_name.data = current_data.name
form.app_ID.data = current_data.appid
        creator = AppAdmin.query.filter(AppAdmin.id == current_data.creater_id).first()
        if creator:
            form.creator_name.data = creator.name
session['validate_app_name'] = form.app_name.data
return render_template('registe_manage_app_setting.html', title=_('App Setting'),
tableName=_('Edit App'), AppAdmin=AppAdmin, form=form,
editTitle=('Edit App'))
# ---------------------------------------------------------------------------------------
# registe manage app extension
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_extension')
@login_required
def registe_manage_app_extension():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'), _('Library File'), _('DB Initial Path')]
data = AppExpand.query.order_by(db.asc(AppExpand.type)).all()
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('App Extension List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_extension_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_add():
form = AddAppExtensionForm(None)
if form.validate_on_submit():
if hasattr(form.library_file.data, 'filename'):
filename1 = secure_filename(form.library_file.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library'), filename1).replace('\\', '/')
form.library_file.data.save(filePath1)
else:
            filePath1 = ''
if hasattr(form.library_file_depend.data, 'filename'):
filename2 = secure_filename(form.library_file_depend.data.filename)
filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library_depend'), filename2).replace('\\', '/')
form.library_file_depend.data.save(filePath2)
else:
filePath2 = ''
db.session.add(AppExpand(id=None, type=form.app_type.data, pattern_begin=form.tag_begin.data,
pattern_end=form.tag_end.data, library_path=filePath1,
library_depend_path=filePath2,
library_desc=form.library_file_description.data,
db_init_path=form.db_info_file_path.data))
db.session.commit()
        flash(_('New app extension has been added.'))
return redirect(url_for('main.registe_manage_app_extension'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('Add New App Extension'), AppAdmin=AppAdmin, form=form,
addTitle=('Add New App Extension'))
@bp.route('/registe_manage_app_extension_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = AppExpand.query.filter(AppExpand.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Type'), _('Tag Template/Begin'), _('Tag Template/End'), _('Library File'), _('DB Initial Path')]
data = AppExpand.query.all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('App Extension List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_extension_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_delete_select():
    flash(_('Batch delete operations are currently not allowed.'))
return redirect(url_for('main.registe_manage_app_extension'))
@bp.route('/registe_manage_app_extension_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_extension_edit(id):
if session.get('validate_app_type'):
form = EditAppExtensionForm(session['validate_app_type'])
else:
form = EditAppExtensionForm(None)
if form.validate_on_submit():
current_data = AppExpand.query.filter(AppExpand.id == id).first()
current_data.type = form.app_type.data
current_data.pattern_begin = form.tag_begin.data
current_data.pattern_end = form.tag_end.data
current_data.library_desc = form.library_file_description.data
current_data.db_init_path = form.db_info_file_path.data
# print(form.library_file.data == '')
# print(form.library_file.data)
form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
if hasattr(form.library_file.data, 'filename'):
filename1 = secure_filename(form.library_file.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library'), filename1).replace('\\', '/')
form.library_file.data.save(filePath1)
current_data.library_path = filePath1
if hasattr(form.library_file_depend.data, 'filename'):
filename2 = secure_filename(form.library_file_depend.data.filename)
filePath2 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['registe_manage_app_extension'],
'library_depend'), filename2).replace('\\', '/')
form.library_file_depend.data.save(filePath2)
current_data.library_depend_path = filePath2
db.session.commit()
        flash(_('App extension has been edited.'))
return redirect(url_for('main.registe_manage_app_extension'))
elif request.method == 'GET':
current_data = AppExpand.query.filter(AppExpand.id == id).first()
        form.app_type.data = current_data.type
form.tag_begin.data = current_data.pattern_begin
form.tag_end.data = current_data.pattern_end
form.library_file.description = _('Selected File: ') + os.path.basename(current_data.library_path)
form.library_file_depend.description = _('Selected File: ') + os.path.basename(current_data.library_depend_path)
form.library_file_description.data = current_data.library_desc
form.db_info_file_path.data = current_data.db_init_path
session['validate_app_type'] = form.app_type.data
return render_template('registe_manage_app_extension.html', title=_('App Extension'),
tableName=_('Edit App Extension'), AppAdmin=AppAdmin, form=form,
editTitle=('Edit App Extension'))
# ---------------------------------------------------------------------------------------
# registe manage app manager setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_manager_setting')
@login_required
def registe_manage_app_manager_setting():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Manager Name'), _('App Name')]
data = {}
preData = AppAdmin.query.all()
for p in preData:
managerName = p.name
for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
appName = App.query.filter(App.id == temp.app_id).first().name
if data.get(managerName):
data[managerName]['name'].append(appName)
else:
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = []
data[managerName]['name'].append(appName)
if not data.get(managerName):
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = ''
else:
data[managerName]['name'].sort()
data[managerName]['name'] = '; '.join(data[managerName]['name'])
    data['sort'] = sorted(data)
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('App Manager List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_manager_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_add():
form = AddAppAdminForm(None)
if form.validate_on_submit():
db.session.add(AppAdmin(id=None, name=form.app_admin_name.data,
password=generate_password_hash(form.app_admin_password.data)))
db.session.commit()
app_admin_id = AppAdmin.query.filter(AppAdmin.name == form.app_admin_name.data).first().id
for app_name in form.app_list.data:
app_id = App.query.filter(App.name == app_name).first().id
db.session.add(AdminToApp(id=None, app_admin_id=app_admin_id, app_id=app_id))
db.session.commit()
        flash(_('New app manager has been added.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('Add New App Manager'), AppAdmin=AppAdmin, form=form,
addTitle=('Add New App Manager'))
@bp.route('/registe_manage_app_manager_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = AppAdmin.query.filter(AppAdmin.id == session['current_delete_id']).first()
for removeAdminToApp in AdminToApp.query.filter(AdminToApp.app_admin_id==current_data.id).all():
db.session.delete(removeAdminToApp)
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Manager Name'), _('App Name')]
data = {}
preData = AppAdmin.query.all()
for p in preData:
managerName = p.name
for temp in AdminToApp.query.filter(AdminToApp.app_admin_id == p.id):
appName = App.query.filter(App.id == temp.app_id).first().name
if data.get(managerName):
data[managerName]['name'].append(appName)
else:
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = []
data[managerName]['name'].append(appName)
if not data.get(managerName):
data[managerName] = {}
data[managerName]['id'] = p.id
data[managerName]['name'] = ''
else:
data[managerName]['name'].sort()
data[managerName]['name'] = '; '.join(data[managerName]['name'])
    data['sort'] = sorted(data)
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('App Manager List'),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_manager_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_delete_select():
    flash(_('Batch delete operations are currently not allowed.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
@bp.route('/registe_manage_app_manager_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_manager_setting_edit(id):
if session.get('validate_app_admin_name'):
form = AddAppAdminForm(session['validate_app_admin_name'])
else:
form = AddAppAdminForm(None)
if form.validate_on_submit():
old_app_list = session['old_app_list'] if session.get('old_app_list') else []
new_app_list = form.app_list.data
add_app_list = [a for a in new_app_list if a not in old_app_list]
remove_app_list = [a for a in old_app_list if a not in new_app_list]
current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
current_data.name = form.app_admin_name.data
        if form.app_admin_password.data.strip():
current_data.password = generate_password_hash(form.app_admin_password.data)
for a in add_app_list:
add_app_id = App.query.filter(App.name == a).first().id
db.session.add(AdminToApp(id=None, app_admin_id=id, app_id=add_app_id))
for a in remove_app_list:
remove_app_id = App.query.filter(App.name == a).first().id
removeAdminToApp = AdminToApp.query.filter(AdminToApp.app_admin_id==id, AdminToApp.app_id==remove_app_id).first()
db.session.delete(removeAdminToApp)
db.session.commit()
        flash(_('App Admin has been edited.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
elif request.method == 'GET':
current_data = AppAdmin.query.filter(AppAdmin.id == id).first()
app_list = [a.app_id for a in AdminToApp.query.filter(AdminToApp.app_admin_id == id)]
app_name_list = [App.query.filter(App.id == a).first().name for a in app_list]
form.app_admin_name.data = current_data.name
form.app_list.data = app_name_list
session['validate_app_admin_name'] = form.app_admin_name.data
session['old_app_list'] = app_name_list
return render_template('registe_manage_app_manager_setting.html', title=_('App Manager Setting'),
tableName=_('Edit App Manager'), AppAdmin=AppAdmin, form=form,
editTitle=('Edit App Manager'))
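# NOTE: registe_manage_app_manager_setting and its *_delete view above build
# the manager-to-apps table with the same loop. A hypothetical helper like
# this sketch could remove the duplication (the views above do not call it):
def _build_app_manager_table():
    """Map each AppAdmin name to its id plus a sorted, '; '-joined app list."""
    data = {}
    for admin in AppAdmin.query.all():
        names = sorted(
            App.query.filter(App.id == rel.app_id).first().name
            for rel in AdminToApp.query.filter(AdminToApp.app_admin_id == admin.id))
        data[admin.name] = {'id': admin.id, 'name': '; '.join(names)}
    data['sort'] = sorted(data)
    return data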
# ---------------------------------------------------------------------------------------
# registe manage app tenant setting
# ---------------------------------------------------------------------------------------
@bp.route('/registe_manage_app_tenant_setting')
@login_required
def registe_manage_app_tenant_setting():
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
data = Tenant.query.order_by(db.asc(Tenant.name)).all()
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('App Tenant List'), App=App,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/registe_manage_app_tenant_setting_add', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_add():
form = AddTenantForm(None)
if form.validate_on_submit():
app_id = App.query.filter(App.name == form.app_list.data).first().id
db.session.add(Tenant(id=None, name=form.tenant_name.data,
password=generate_password_hash(form.tenant_password.data),
tenantid=hashlib.md5(form.tenant_name.data.encode(encoding='UTF-8')).hexdigest(),
app_id=app_id))
db.session.commit()
        flash(_('New Tenant has been added.'))
return redirect(url_for('main.registe_manage_app_tenant_setting'))
elif request.method == 'GET':
pass
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('Add New App Tenant'), form=form,
addTitle=('Add New App Tenant'))
@bp.route('/registe_manage_app_tenant_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = Tenant.query.filter(Tenant.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
tHead = [_('App Tenant Name'), _('App Tenant ID'), _('App Name')]
data = Tenant.query.order_by(db.asc(Tenant.name)).all()
confirmTitle = 'Confirm your choice:'
confirmMessage = 'Do you want to delete this record?'
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('App Tenant List'), App=App,
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/registe_manage_app_tenant_setting_delete_select', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_delete_select():
    flash(_('Batch delete operations are currently not allowed.'))
return redirect(url_for('main.registe_manage_app_manager_setting'))
@bp.route('/registe_manage_app_tenant_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def registe_manage_app_tenant_setting_edit(id):
if session.get('validate_app_tenant_name'):
form = AddTenantForm(session['validate_app_tenant_name'])
else:
form = AddTenantForm(None)
if form.validate_on_submit():
current_data = Tenant.query.filter(Tenant.id == id).first()
current_data.name = form.tenant_name.data
        if form.tenant_password.data.strip():
current_data.password = generate_password_hash(form.tenant_password.data)
app_id = App.query.filter(App.name == form.app_list.data).first().id
current_data.app_id = app_id
db.session.commit()
        flash(_('App Tenant has been edited.'))
return redirect(url_for('main.registe_manage_app_tenant_setting'))
elif request.method == 'GET':
current_data = Tenant.query.filter(Tenant.id == id).first()
app_name = App.query.filter(App.id == current_data.app_id).first().name
form.tenant_name.data = current_data.name
form.app_list.data = app_name
form.tenant_id.data = current_data.tenantid
session['validate_app_tenant_name'] = form.tenant_name.data
return render_template('registe_manage_app_tenant_setting.html', title=_('App Tenant Setting'),
tableName=_('Edit App Tenant'), form=form,
editTitle=('Edit App Tenant'))
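# NOTE: every *_delete view above follows the same two-step protocol: a GET
# stores the record id in the session, then a POST carrying
# {'name': 'execute'} performs the deletion. A hypothetical helper like this
# sketch could centralize the execute step (the views above do not call it):
def _execute_pending_delete(model):
    """Delete the session's pending record of the given model and commit."""
    record = model.query.filter(model.id == session['current_delete_id']).first()
    db.session.delete(record)
    db.session.commit()
    flash(_('Record has been deleted.'))
    return jsonify({'result': 'success'})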
# ---------------------------------------------------------------------------------------
# app manage change current app
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_set_current_app', methods=['GET', 'POST'])
@login_required
def app_manage_set_current_app():
if request.method == 'POST':
data = request.get_json()
name = data.get('name')
current_data = App.query.filter(App.name == name).first()
if current_data:
session['current_selected_app_id'] = current_data.id
session['current_selected_app_name'] = current_data.name
flash(_('Switch current app success!'))
return jsonify({'result': 'success'})
def get_app_name_list():
app_list = [a.app_id for a in AdminToApp.query.filter(
AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
data = [App.query.filter(App.id == a).order_by(db.asc(App.name)).first() for a in app_list]
data.sort(key=lambda a: a.name)
app_name_list = [a.name for a in data]
return app_name_list
def get_current_selected_app_name():
current_selected_app_name = None
if session.get('current_selected_app_name'):
current_selected_app_name = session['current_selected_app_name']
return current_selected_app_name
# ---------------------------------------------------------------------------------------
# app manage app list
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_app_list')
@login_required
def app_manage_app_list():
isCheck = True
tHead = [_('App Name'), _('App ID'), _('Creator')]
    app_list = [a.app_id for a in AdminToApp.query.filter(
        AdminToApp.app_admin_id == session['current_app_manager_id']).all()]
    data = [App.query.filter(App.id == a).first() for a in app_list]
    data.sort(key=lambda a: a.name)
return render_template('app_manage_app_list.html', title=_('App List'),
tableName=_('App List'), AppAdmin=AppAdmin, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# app manage code configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_function_configure')
@login_required
def app_manage_function_configure():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('app_manage_function_configure.html', title=_('Online Function'),
tableName=_('Function Configure'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_function_configure_test')
@login_required
def app_manage_function_configure_test():
testFunc()
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('app_manage_function_configure.html', title=_('Online Function'),
tableName=_('Function Configure'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
def testFunc():
    # Ad-hoc experiment: locate the <dd>...API监控...</dd> block with sed and
    # wrap it with the template tag pair below, writing the result to a file.
    filePath = 'F:/test/main.html'
    pattern = r'x;/<dd>.*API监控.*<\/dd>/{p;q};/<dd>.*<\/dd>/{x;h;d;ta};/<dd>.*/{x;H;ta};{x;h;d};:a'
    tag_begin = '{if .role_APIguanli}'
    tag_end = '{end}'
    args = 'cat -n %s | sed -n "%s" | { eval $(awk \'NR==1{print "a="$1} END {print "b="$1}\'); ' \
           'sed -e "$a i %s" -e "$b a %s" %s;} > F:/test/test.txt' % (filePath, pattern, tag_begin, tag_end, filePath)
    with open('F:/test/temp.sh', 'w', encoding='utf-8') as shell_file:
        shell_file.write(args)
    # raw string: the Windows path contains backslashes
    exec_path = r"D:\Program Files\Git\git-bash.exe"
    print(args)
    # getstatusoutput expects a command string rather than an argument list
    (status, output) = subprocess.getstatusoutput('"%s" F:/test/temp.sh' % exec_path)
    print(status, output)
@bp.route('/get_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_file_path(tag):
    app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
    if tag in ['version2package.json', 'package2function.json']:
        # look for the file under the upload folder, but return the matching
        # path under the HTML-served folder for the client to fetch
        filePath = os.path.join(
            current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id, tag)
        if os.path.isfile(filePath):
            filePath = os.path.join(
                current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure_html'], app_id, tag)
            return jsonify({'result': 'success', 'filePath': filePath})
    return jsonify({'result': 'fail', 'filePath': False})
@bp.route('/app_manage_init_file/<tag>', methods=['GET', 'POST'])
@login_required
def app_manage_init_file(tag):
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
initJson = [
{
"data": {
"file_path": "",
"item_pattern": ""
},
"id": "Root",
"parent": "#",
"text": "Root"
}
]
if tag in ['version2package.json', 'package2function.json']:
        try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                new_file.write(json.dumps(initJson))
            flash(_('File initialization for %(tag)s succeeded.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File initialization for %(tag)s failed.', tag=tag))
return jsonify({'result': 'success'})
@bp.route('/app_manage_save_file', methods=['GET', 'POST'])
@login_required
def app_manage_save_file():
data = request.get_json()
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
tag = data['tag']
new_json = json.loads(data['json'])
print(new_json)
if tag in ['version2package.json', 'package2function.json']:
        try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                # json.dump(new_json, new_file, ensure_ascii=False, indent=4)
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s succeeded.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
return jsonify({'result': 'success'})
@bp.route('/app_manage_upload_file', methods=['GET', 'POST'])
@login_required
def app_manage_upload_file():
version_to_package_file = request.files['version_to_package_file']
package_to_function_file = request.files['package_to_function_file']
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
version_to_package_file.save(os.path.join(filePath, 'version2package.json'))
package_to_function_file.save(os.path.join(filePath, 'package2function.json'))
flash(_('Import success!'))
return jsonify({'result': 'success'})
# ---------------------------------------------------------------------------------------
# app manage database configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_database_configure')
@login_required
def app_manage_database_configure():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(db.asc(TenantDb.database)).all()
return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_database_configure_add', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_add():
form = AddTenantDatabaseForm(None)
if form.validate_on_submit():
current_tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
db.session.add(TenantDb(id=None, hostname=form.host_name.data, driver=form.database_driver.data,
username=form.user_name.data,
password=generate_password_hash(form.user_password.data),
database=form.database_name.data, port=form.database_port.data,
aliasname='_'.join([form.database_driver.data, form.database_name.data]),
type=current_type, tenant_id=current_tenant_id, app_id=session['current_selected_app_id']))
db.session.commit()
        flash(_('New tenant database has been added.'))
return redirect(url_for('main.app_manage_database_configure'))
elif request.method == 'GET':
form.app_name.data = session['current_selected_app_name']
form.host_name.data = 'localhost'
form.database_port.data = '3306'
form.database_driver.data = 'mysql'
form.user_name.data = 'root'
pass
return render_template('app_manage_database_configure.html', title=_('Tenant Database Configure'),
tableName=_('Add New Tenant Database'), form=form, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           addTitle=_('Add New Tenant Database'))
@bp.route('/app_manage_database_configure_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
db.asc(TenantDb.username)).all()
    confirmTitle = _('Confirm your choice:')
    confirmMessage = _('Do you want to delete this record?')
return render_template('app_manage_database_configure.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_database_configure_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.app_manage_database_configure'))
@bp.route('/app_manage_database_configure_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_database_configure_edit(id):
if session.get('validate_alias_name'):
form = EditTenantDatabaseForm(session['validate_alias_name'])
else:
form = EditTenantDatabaseForm(None)
if form.validate_on_submit():
current_data = TenantDb.query.filter(TenantDb.id == id).first()
current_data.hostname = form.host_name.data
current_data.driver = form.database_driver.data
current_data.username = form.user_name.data
current_data.database = form.database_name.data
current_data.port = form.database_port.data
current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_data.app_id = session['current_selected_app_id']
if not form.user_password.data.strip() == '':
current_data.password = generate_password_hash(form.user_password.data)
db.session.commit()
        flash(_('Tenant Database has been edited.'))
return redirect(url_for('main.app_manage_database_configure'))
elif request.method == 'GET':
current_data = TenantDb.query.filter(TenantDb.id == id).first()
form.app_name.data = session['current_selected_app_name']
form.host_name.data = current_data.hostname
form.database_port.data = current_data.port
form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
form.database_driver.data = current_data.driver
form.database_name.data = current_data.database
form.user_name.data = current_data.username
        form.user_password.description = _('In edit mode, leaving this field empty keeps the current password unchanged.')
session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
return render_template('app_manage_database_configure.html', title=_('Tenant Database Configure'),
tableName=_('Edit Tenant Database'), form=form,app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           editTitle=_('Edit Tenant Database'))
# ---------------------------------------------------------------------------------------
# app manage code configure
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_code_configure', methods=['GET', 'POST'])
@login_required
def app_manage_code_configure():
if session.get('validate_repo'):
form = AddAppCodeForm(session['validate_repo'])
else:
form = AddAppCodeForm(None)
if form.validate_on_submit():
current_data = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
current_data.repo = form.code_repo.data
current_data.app_expand_id = AppExpand.query.filter(AppExpand.type == form.app_type.data).first().id
current_data.db_config_path = form.db_config_path.data
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
print(app_id)
# app_id = 'fe01ce2a7fbac8fafaed7c982a04e229'
current_data.remote_login_configure_path = form.remote_login_config_path.data
current_data.remote_login_using_flag = form.remote_login_using_flag.data
# current_data.remote_login_using_content = form.remote_login_using_content.data
if hasattr(form.remote_login_using_content.data, 'filename'):
filename1 = secure_filename(form.remote_login_using_content.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.remote_login_using_content.data.save(filePath1)
current_data.remote_login_using_content = filePath1
        # Persist the path fields from the submitted form
        current_data.library_path = form.library_path.data
        current_data.filter_package_path = form.filter_package_path.data
        # form.filter_content.data = current_data.filter_content
        current_data.filter_configure_path = form.filter_config_path.data
if hasattr(form.filter_content.data, 'filename'):
filename1 = secure_filename(form.filter_content.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.filter_content.data.save(filePath1)
current_data.filter_content = filePath1
        current_data.filter_import_flag = form.filter_import_flag.data
        # form.filter_import_content.data = current_data.filter_import_content
        current_data.filter_using_flag = form.filter_using_flag.data
        # form.filter_using_content.data = current_data.filter_using_content
if hasattr(form.filter_import_content.data, 'filename'):
filename1 = secure_filename(form.filter_import_content.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.filter_import_content.data.save(filePath1)
current_data.filter_import_content = filePath1
if hasattr(form.filter_using_content.data, 'filename'):
filename1 = secure_filename(form.filter_using_content.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.filter_using_content.data.save(filePath1)
current_data.filter_using_content = filePath1
# form.call_starting_point.data = current_data.call_starting_point
# form.third_party_packages.data = current_data.third_party_packages
if hasattr(form.call_starting_point.data, 'filename'):
filename1 = secure_filename(form.call_starting_point.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.call_starting_point.data.save(filePath1)
current_data.call_starting_point = filePath1
if hasattr(form.third_party_packages.data, 'filename'):
filename1 = secure_filename(form.third_party_packages.data.filename)
filePath1 = os.path.join(os.path.join(current_app.config['UPLOAD_FOLDERS']['app_manage_code_configure'],
app_id), filename1).replace('\\', '/')
form.third_party_packages.data.save(filePath1)
current_data.third_party_packages = filePath1
db.session.commit()
flash(_('Code configuration have been edited.'))
return redirect(url_for('main.app_manage_code_configure'))
elif request.method == 'GET':
current_data = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
current_extension_data = AppExpand.query.filter(AppExpand.id == current_data.app_expand_id).first()
form.app_type.data = current_extension_data.type
form.code_repo.data = current_data.repo
form.tag_begin.data = current_extension_data.pattern_begin
form.tag_end.data = current_extension_data.pattern_end
form.db_config_path.data = current_data.db_config_path
form.remote_login_config_path.data = current_data.remote_login_configure_path
form.remote_login_using_flag.data = current_data.remote_login_using_flag
# form.remote_login_using_content.data = current_data.remote_login_using_content
form.remote_login_using_content.description = _('Selected File: ') + current_data.remote_login_using_content
form.library_path.data = current_data.library_path
form.filter_package_path.data = current_data.filter_package_path
# form.filter_content.data = current_data.filter_content
        form.filter_content.description = _('Selected File: ') + current_data.filter_content
form.filter_config_path.data = current_data.filter_configure_path
form.filter_import_flag.data = current_data.filter_import_flag
# form.filter_import_content.data = current_data.filter_import_content
form.filter_import_content.description = _('Selected File: ') + current_data.filter_import_content
form.filter_using_flag.data = current_data.filter_using_flag
# form.filter_using_content.data = current_data.filter_using_content
form.filter_using_content.description = _('Selected File: ') + current_data.filter_using_content
# form.call_starting_point.data = current_data.call_starting_point
# form.third_party_packages.data = current_data.third_party_packages
form.call_starting_point.description = _('Selected File: ') + current_data.call_starting_point
form.third_party_packages.description = _('Selected File: ') + current_data.third_party_packages
session['validate_repo'] = form.code_repo.data
return render_template('app_manage_code_configure.html', title=_('Edit Code Information'),
tableName=_('Edit Code Information'), form=form, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name())
# ---------------------------------------------------------------------------------------
# app manage mirror list
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_mirror_list')
@login_required
def app_manage_mirror_list():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Mirror Name'), _('Creator'), _('Created Time')]
data = [App.query.order_by(db.asc(App.name)).first()]
return render_template('app_manage_mirror_list.html', title=_('Mirror Manage'),
tableName=_('Mirror List'), AppAdmin=AppAdmin, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_mirror_list_add', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_add():
logPath = 'logs/mirror_construct.log'
    # Truncate (or create) the construction log so every build starts clean
    with open(logPath, 'w', encoding='utf-8') as new_log_file:
        new_log_file.write('')
app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
current_code = AppCode.query.filter(AppCode.app_id == session['current_selected_app_id']).first()
mirror_construction(current_app._get_current_object(), app_id, current_code)
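    # mirror_construction runs asynchronously; the client polls /get_log with
    # the returned logPath until the 'Operation done.' sentinel appears.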
return jsonify({'code': '0', 'logPath': logPath, 'message': 'Operation done.'})
@bp.route('/app_manage_mirror_list_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
db.asc(TenantDb.username)).all()
    confirmTitle = _('Confirm your choice:')
    confirmMessage = _('Do you want to delete this record?')
return render_template('app_manage_mirror_list.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_mirror_list_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.app_manage_mirror_list'))
@bp.route('/app_manage_mirror_list_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_mirror_list_edit(id):
if session.get('validate_alias_name'):
form = EditTenantDatabaseForm(session['validate_alias_name'])
else:
form = EditTenantDatabaseForm(None)
if form.validate_on_submit():
current_data = TenantDb.query.filter(TenantDb.id == id).first()
current_data.hostname = form.host_name.data
current_data.driver = form.database_driver.data
current_data.username = form.user_name.data
current_data.database = form.database_name.data
current_data.port = form.database_port.data
current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_data.app_id = session['current_selected_app_id']
if not form.user_password.data.strip() == '':
current_data.password = generate_password_hash(form.user_password.data)
db.session.commit()
        flash(_('Tenant Database has been edited.'))
return redirect(url_for('main.app_manage_mirror_list'))
elif request.method == 'GET':
current_data = TenantDb.query.filter(TenantDb.id == id).first()
form.app_name.data = session['current_selected_app_name']
form.host_name.data = current_data.hostname
form.database_port.data = current_data.port
form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
form.database_driver.data = current_data.driver
form.database_name.data = current_data.database
form.user_name.data = current_data.username
        form.user_password.description = _('In edit mode, leaving this field empty keeps the current password unchanged.')
session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
return render_template('app_manage_mirror_list.html', title=_('Tenant Database Configure'),
tableName=_('Edit Tenant Database'), form=form,app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           editTitle=_('Edit Tenant Database'))
@bp.route('/get_log', methods=['GET', 'POST'])
@login_required
def get_log():
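    # Incremental log tailing: the client sends the byte offset it has already
    # read ('start'); we return everything after it plus the new offset, and
    # set hasMore to False once the 'Operation done.' sentinel appears.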
logStr = ''
data = request.get_json()
    if os.path.isfile(data['file']):
        with open(data['file'], encoding='utf-8') as log_file:
            logStr = log_file.read()[data['start']:]
        pos = data['start'] + len(logStr)
hasMore = True
if 'Operation done.' in logStr:
hasMore = False
return jsonify({'code': '0', 'log': logStr.replace('\n', '<br/>'), 'pos': pos, 'hasMore': hasMore})
else:
print('debug1', data['file'])
print('debug1', os.path.isfile(data['file']))
print('debug1', os.path.exists(data['file']))
        return jsonify({'code': '-1', 'message': 'Log file does not exist: %s' % (data['file'])})
@bp.route('/remove_log', methods=['GET', 'POST'])
@login_required
def remove_log():
    data = request.get_json(silent=True)
    if data and data.get('file') and os.path.isfile(data['file']):
        # Truncate rather than delete, so handles held elsewhere stay valid
        with open(data['file'], 'w') as clear_file:
            clear_file.write('')
return jsonify({'code': '0', 'message': 'remove log at %s' % (datetime.utcnow())})
# mirror construction
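# NOTE: '@async' is assumed to be a project-defined decorator that runs the
# function in a background thread (its definition is not shown here); since
# 'async' became a reserved word in Python 3.7, it would need another name
# (e.g. async_task) on modern interpreters.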
@async
def mirror_construction(app, app_id, current_code):
    with app.app_context(), app.request_context(EnvironBuilder('/', 'http://localhost/').get_environ()):
# with app.app_context():
remove_log()
logger.info('Operation begin:\n')
logger.info('1.------Reading function package, atomic function data of app------')
#read app function json
tag = 'package2function.json'
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id), tag)
if os.path.isfile(filePath):
json_dict = json.load(open(filePath, encoding='utf-8'))
for a in json_dict:
id = a['id']
if 'file_path' in a['data'] and 'item_pattern' in a['data']:
file_path = a['data']['file_path']
item_pattern = a['data']['item_pattern']
# logger.info('id: %s\nfile_path: %s\nitem_pattern: %s', id, file_path, item_pattern)
logger.info('2.------Pulling code from registry------')
sourceSrcDir = 'F:/code/PPGo_ApiAdmin'
dstSrcDir = 'F:/code/Tenant_PPGo_ApiAdmin'
if os.path.exists(dstSrcDir):
# print('rmtree')
shutil.rmtree(dstSrcDir)
# print('copytree')
shutil.copytree(sourceSrcDir, dstSrcDir)
logger.info('3.------insert tag template in code------')
        # Changing the working directory can cause thread-safety problems, so avoid os.chdir here
# old_cwd = os.getcwd()
# os.chdir(dstSrcDir)
args = ''
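        # For each configured menu item, emit a shell snippet that locates the
        # matching block with sed, captures its first/last line numbers with
        # awk, and wraps that range in Go-template role tags in place.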
for a in json_dict:
            if 'file_path' in a['data'] and 'item_pattern' in a['data'] and\
                    a['data']['file_path'] != '' and a['data']['item_pattern'] != '':
# filePath = 'F:/test/main.html'
filePath = os.path.join(dstSrcDir, a['data']['file_path']).replace('\\', '/')
# pattern = 'x;/<dd>.*API监控.*<\/dd>/{p;q};/<dd>.*<\/dd>/{x;h;d;ta};/<dd>.*/{x;H;ta};{x;h;d};:a'
pattern = a['data']['item_pattern']
# tag_begin = '{if .role_APIguanli}'
tag_begin = '{{if .role_%s}}' % (a['id'])
tag_end = '{{end}}'
args += 'cat -n %s | sed -n "%s" | { eval $(awk \'NR==1{print "a="$1} END {print "b="$1}\'); ' \
'sed -e "$a i %s" -e "$b a %s" %s;} > F:/temp.txt\n cp F:/temp.txt %s\n' % \
(filePath, pattern, tag_begin, tag_end, filePath, filePath)
        with open('F:/test/temp.sh', 'w', encoding='utf-8') as shell_file:
            shell_file.write(args)
exec_path = "D:\Program Files\Git\git-bash.exe"
# (status, output) = subprocess.getstatusoutput([exec_path, 'F:/test/temp.sh'])
CREATE_NO_WINDOW = 0x08000000
subprocess.call([exec_path, 'F:/test/temp.sh'], creationflags=CREATE_NO_WINDOW)
# os.chdir(old_cwd)
logger.info('4.------initialing tenant database connection------')
pass
logger.info('5.------extending filter code------')
filter_package_path = os.path.join(dstSrcDir, current_code.filter_package_path).replace('\\', '/')
filter_content = current_code.filter_content
if not os.path.isdir(filter_package_path):
os.makedirs(filter_package_path)
old_filter_file = os.path.join(filter_package_path, os.path.basename(filter_content)).replace('\\', '/')
if os.path.isfile(old_filter_file):
os.remove(old_filter_file)
        shutil.copyfile(filter_content, os.path.join(filter_package_path, os.path.basename(filter_content)).replace('\\', '/'))
filter_config_path = os.path.join(dstSrcDir, current_code.filter_configure_path).replace('\\', '/')
filter_import_flag = current_code.filter_import_flag
filter_import_content = current_code.filter_import_content
filter_using_flag = current_code.filter_using_flag
filter_using_content = current_code.filter_using_content
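        # Rewrite the filter config in place: after every line containing an
        # import/using flag, splice in the uploaded file's lines, prefixing
        # each with whatever precedes the flag (normally its indentation).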
with open(filter_config_path, "r", encoding="utf-8") as f:
lines = f.readlines()
with open(filter_config_path, "w", encoding="utf-8") as f_w:
for line in lines:
if filter_import_flag in line:
f_w.write(line)
pre = line[:line.index(filter_import_flag)]
wlines = open(filter_import_content, encoding="utf-8").readlines()
for l in wlines:
f_w.write(pre + l)
# f_w.write(open(filter_import_content, encoding="utf-8").read())
elif filter_using_flag in line:
f_w.write(line)
pre = line[:line.index(filter_using_flag)]
wlines = open(filter_using_content, encoding="utf-8").readlines()
for l in wlines:
f_w.write(pre + l)
# f_w.write(open(filter_using_content, encoding="utf-8").read())
else:
f_w.write(line)
logger.info('6.------extending remote login code------')
remote_login_config_path = os.path.join(dstSrcDir, current_code.remote_login_configure_path)
remote_login_using_flag = current_code.remote_login_using_flag
remote_login_using_content = current_code.remote_login_using_content
with open(remote_login_config_path, "r", encoding="utf-8") as f:
lines = f.readlines()
        # Reopen the file for writing
with open(remote_login_config_path, "w", encoding="utf-8") as f_w:
for line in lines:
if remote_login_using_flag in line:
pre = line[:line.index(remote_login_using_flag)]
f_w.write(line)
wlines = open(remote_login_using_content, encoding="utf-8").readlines()
for l in wlines:
f_w.write(pre + l)
else:
f_w.write(line)
        # Copy in the supporting library files
library_src_path = os.path.join(current_app.config['UPLOAD_FOLDERS']['library_path'],
'go beego\\saas_support')
library_dst_path = os.path.join(os.path.join(dstSrcDir, current_code.library_path), 'saas_support')
# if os.path.exists(library_path):
# # print('rmtree')
# shutil.rmtree(library_path)
# print('copytree')
shutil.copytree(library_src_path, library_dst_path)
logger.info('7.------packing mirror------')
        file_handler.setFormatter(blank_formatter)  # switch to the blank format for raw subprocess output
# subprocess.call([exec_path, 'docker build -t testdocker:v1 %s'%(dstSrcDir)], creationflags=CREATE_NO_WINDOW)
# state, output = subprocess.getstatusoutput('docker build -t testdocker:v1 %s'%(dstSrcDir))
cmd = 'docker build -t reg.silvermaple.com/demo/demo:1.0.0 %s'%(dstSrcDir)
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        while p.poll() is None:
            line = p.stdout.readline().strip()
            if line:
                logger.info(line.decode('utf-8'))
        if p.returncode == 0:
            logger.info('Mirror packed successfully.')
        else:
            logger.info('Mirror packing failed.')
        file_handler.setFormatter(formatter)  # restore the standard output format
logger.info('8.------uploading mirror------')
file_handler.setFormatter(blank_formatter)
cmd = 'docker push reg.silvermaple.com/demo/demo:1.0.0'
# state, output = subprocess.getstatusoutput(cmd)
# logger.info(output)
        p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        while p.poll() is None:
            line = p.stdout.readline().strip()
            if line:
                logger.info(line.decode('utf-8'))
        if p.returncode == 0:
            logger.info('Mirror uploaded successfully.')
        else:
            logger.info('Mirror upload failed.')
        file_handler.setFormatter(formatter)  # restore the standard output format
logger.info('Operation done.')
else:
            logger.info('File package2function.json does not exist.\nOperation done.')
return jsonify({'code': '-1', 'message': 'File package2function.json not exist.'})
return jsonify({'code': '0', 'message': 'Success'})
# ---------------------------------------------------------------------------------------
# app manage service deploy
# ---------------------------------------------------------------------------------------
@bp.route('/app_manage_service_deploy')
@login_required
def app_manage_service_deploy():
isCheck = True
isEdit = True
isDelete = False
session['is_delete'] = 'false'
tHead = [_('ID'), _('Mirror'), _('Instance Number'), _('State'), _('Action')]
action_list = [_('Publish'), _('Adjust'), _('Restart'), _('Stop'), _('Destroy')]
data = [App.query.order_by(db.asc(App.name)).first()]
return render_template('app_manage_service_deploy.html', title=_('Service Deploy'),
tableName=_('Service Container List'), action_list=action_list, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/app_manage_service_deploy_add', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_add():
form = True
if request.method == 'POST':
# if form.validate_on_submit():
# current_tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
# current_type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
# db.session.add(TenantDb(id=None, hostname=form.host_name.data, driver=form.database_driver.data,
# username=form.user_name.data,
# password=generate_password_hash(form.user_password.data),
# database=form.database_name.data, port=form.database_port.data,
# aliasname='_'.join([form.database_driver.data, form.database_name.data]),
# type=current_type, tenant_id=current_tenant_id, app_id=session['current_selected_app_id']))
# db.session.commit()
        flash(_('New tenant database has been added.'))
return redirect(url_for('main.app_manage_service_deploy'))
elif request.method == 'GET':
# form.app_name.data = session['current_selected_app_name']
# form.host_name.data = 'localhost'
# form.database_port.data = '3306'
# form.database_driver.data = 'mysql'
# form.user_name.data = 'root'
pass
return render_template('app_manage_service_deploy.html', title=_('Service Deploy'),
tableName=_('Add New Container'), form=form, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           addTitle=_('Add New Container'))
@bp.route('/app_manage_service_deploy_delete/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = TenantDb.query.filter(TenantDb.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Tenant'), _('Is System Extension'), _('Database'), _('IP'), _('Port')]
data = TenantDb.query.filter(TenantDb.app_id == session['current_selected_app_id']).order_by(
db.asc(TenantDb.username)).all()
    confirmTitle = _('Confirm your choice:')
    confirmMessage = _('Do you want to delete this record?')
return render_template('app_manage_service_deploy.html', title=_('Tenant Database List'),
tableName=_('Tenant Database List'), Tenant=Tenant, app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/app_manage_service_deploy_delete_select', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.app_manage_service_deploy'))
@bp.route('/app_manage_service_deploy_edit/<id>', methods=['GET', 'POST'])
@login_required
def app_manage_service_deploy_edit(id):
if session.get('validate_alias_name'):
form = EditTenantDatabaseForm(session['validate_alias_name'])
else:
form = EditTenantDatabaseForm(None)
if form.validate_on_submit():
current_data = TenantDb.query.filter(TenantDb.id == id).first()
current_data.hostname = form.host_name.data
current_data.driver = form.database_driver.data
current_data.username = form.user_name.data
current_data.database = form.database_name.data
current_data.port = form.database_port.data
current_data.aliasname = '_'.join([form.database_driver.data, form.database_name.data])
current_data.type = 'system' if form.system_extension.data == 'System Extension' else 'origin'
current_data.tenant_id = Tenant.query.filter(Tenant.name == form.tenant_name.data).first().id
current_data.app_id = session['current_selected_app_id']
if not form.user_password.data.strip() == '':
current_data.password = generate_password_hash(form.user_password.data)
db.session.commit()
        flash(_('Tenant Database has been edited.'))
return redirect(url_for('main.app_manage_service_deploy'))
elif request.method == 'GET':
current_data = TenantDb.query.filter(TenantDb.id == id).first()
form.app_name.data = session['current_selected_app_name']
form.host_name.data = current_data.hostname
form.database_port.data = current_data.port
form.system_extension.data = 'System Extension' if current_data.type == 'system' else 'Not System Extension'
form.database_driver.data = current_data.driver
form.database_name.data = current_data.database
form.user_name.data = current_data.username
        form.user_password.description = _('In edit mode, leaving this field empty keeps the current password unchanged.')
session['validate_alias_name'] = '_'.join([form.database_driver.data, form.database_name.data])
return render_template('app_manage_service_deploy.html', title=_('Tenant Database Configure'),
tableName=_('Edit Tenant Database'), form=form,app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
                           editTitle=_('Edit Tenant Database'))
# ---------------------------------------------------------------------------------------
# tenant service customize function
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_customize_function')
@login_required
def tenant_service_customize_function():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
# flash(_('Batch delete operation are not allowed now.'))
return render_template('tenant_service_customize_function.html', title=_('Customized Function'),
tableName=_('Function Root'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/tenant_service_customize_function_edit')
@login_required
def tenant_service_customize_function_edit():
form = True
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
data = App.query.order_by(db.asc(App.name)).all()
return render_template('tenant_service_customize_function.html', title=_('Customized Function'),
editTitle=_('Customize'),
tableName=_('Function Root'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, form=form,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/tenant_service_customize_function_save', methods=['GET', 'POST'])
@login_required
def tenant_service_customize_function_save():
data = request.get_json()
tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
filePath = os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'], tenant_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
tag = data['tag']
new_json = json.loads(data['json'])
# print(new_json)
# print(tag)
if tag in ['version2function.json']:
try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                # json.dump(new_json, new_file, ensure_ascii=False, indent=4)
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s succeeded.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
return jsonify({'result': 'success'})
@bp.route('/get_tenant_customize_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_tenant_customize_file_path(tag):
tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
if tag == 'version2function.json':
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'], tenant_id), tag)
if os.path.isfile(filePath):
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function_html'], tenant_id), tag)
return jsonify({'result': 'success', 'filePath': filePath})
# filePath1 = os.path.join(
# current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'], tenant_id)
# if not os.path.isdir(filePath1):
# os.makedirs(filePath1)
# app_id = App.query.filter(App.id == session['current_selected_app_id']).first().appid
# app_file = os.path.join(os.path.join(
# current_app.config['UPLOAD_FOLDERS']['app_manage_function_configure'], app_id), 'version2package.json')
# shutil.copyfile(app_file, filePath)
# filePath = os.path.join(os.path.join(
# current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function_html'], tenant_id), tag)
# return jsonify({'result': 'success', 'filePath': filePath})
# flash(_('No customize function now!'))
return jsonify({'result': 'fail', 'filePath': False})
# ---------------------------------------------------------------------------------------
# tenant service role setting
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_role_setting')
@login_required
def tenant_service_role_setting():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Role Name'), _('Creator'), _('App Name')]
data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
# flash(_('Batch delete operation are not allowed now.'))
return render_template('tenant_service_role_setting.html', title=_('Role List'),
tableName=_('Role List'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/tenant_service_role_setting_allocate/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_allocate(id):
form = True
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
session['current_role_id'] = id
tHead = [_('Role Name'), _('Creator'), _('App Name')]
data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
role_name = SaasRole.query.filter(SaasRole.id==id).first().name
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
# flash(_('Batch delete operation are not allowed now.'))
return render_template('tenant_service_role_setting.html', title=_('Role List'),
tableName=_('Allocate Function'), app_name_list=get_app_name_list(), form=form, role_id=id,
current_selected_app_name=get_current_selected_app_name(), role_name=role_name,
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data, role_tag_prefix='role_')
@bp.route('/tenant_service_role_setting_save', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_save():
data = request.get_json()
role_id = session['current_role_id']
tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
filePath = os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting'], tenant_id), role_id)
if not os.path.isdir(filePath):
os.makedirs(filePath)
tag = data['tag']
new_json = json.loads(data['json'])
print(new_json)
print(tag)
if tag in ['version2function.json']:
try:
            with open(os.path.join(filePath, tag), 'w') as new_file:
                # json.dump(new_json, new_file, ensure_ascii=False, indent=4)
                json.dump(new_json, new_file, indent=4)
            flash(_('File save for %(tag)s succeeded.', tag=tag))
        except Exception as e:
            print(e)
            flash(_('File save for %(tag)s failed.', tag=tag))
return jsonify({'result': 'success'})
@bp.route('/get_role_customize_file_path/<tag>', methods=['GET', 'POST'])
@login_required
def get_role_customize_file_path(tag):
tenant_id = Tenant.query.filter(Tenant.id == session['current_tenant_id']).first().tenantid
role_id = session['current_role_id']
if tag == 'version2function.json':
filePath = os.path.join(os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting'], tenant_id), role_id), tag)
if os.path.isfile(filePath):
filePath = os.path.join(os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_role_setting_html'], tenant_id), role_id), tag)
return jsonify({'result': 'success', 'filePath': filePath})
return jsonify({'result': 'fail', 'filePath': False})
@bp.route('/tenant_service_role_setting_add', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_add():
form = AddRoleForm(None)
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
if form.validate_on_submit():
        db.session.add(SaasRole(id=None, name=form.role_name.data, funcdata_mod_time=str(datetime.now())))
db.session.commit()
        flash(_('New role has been added.'))
return redirect(url_for('main.tenant_service_role_setting'))
elif request.method == 'GET':
form.creator.data = current_tenant_name
form.app_name.data = get_current_selected_app_name()
pass
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('App Name'), _('App ID'), _('Creator')]
# flash(_('Batch delete operation are not allowed now.'))
return render_template('tenant_service_role_setting.html', title=_('Role List'), form=form,
tableName=_('Add Role'), app_name_list=get_app_name_list(), addTitle=_('Add Role'),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead)
@bp.route('/tenant_service_role_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = SaasRole.query.filter(SaasRole.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Role Name'), _('Creator'), _('App Name')]
data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    confirmTitle = _('Confirm your choice:')
    confirmMessage = _('Do you want to delete this record?')
return render_template('tenant_service_role_setting.html', title=_('Role List'),
tableName=_('Role List'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/tenant_service_role_setting_delete_select', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.tenant_service_role_setting'))
@bp.route('/tenant_service_role_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_role_setting_edit(id):
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
if session.get('validate_role_name'):
form = AddRoleForm(session['validate_role_name'])
else:
form = AddRoleForm(None)
if form.validate_on_submit():
current_data = SaasRole.query.filter(SaasRole.id == id).first()
current_data.name = form.role_name.data
db.session.commit()
        flash(_('Role has been edited.'))
return redirect(url_for('main.tenant_service_role_setting'))
elif request.method == 'GET':
current_data = SaasRole.query.filter(SaasRole.id == id).first()
form.role_name.data = current_data.name
form.creator.data = current_tenant_name
form.app_name.data = get_current_selected_app_name()
session['validate_role_name'] = form.role_name.data
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('Role Name'), _('Creator'), _('App Name')]
data = SaasRole.query.order_by(db.asc(SaasRole.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
return render_template('tenant_service_role_setting.html', title=_('Role List'), form=form,
tableName=_('Edit Role'), app_name_list=get_app_name_list(), editTitle=_('Edit Role'),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# tenant service user setting
# ---------------------------------------------------------------------------------------
@bp.route('/tenant_service_user_setting')
@login_required
def tenant_service_user_setting():
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
return render_template('tenant_service_user_setting.html', title=_('User List'),
tableName=_('User List'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
current_tenant_name=current_tenant_name,
isCheck=isCheck, isEdit=isEdit, SaasRole=SaasRole,
isDelete=isDelete, tHead=tHead, data=data)
@bp.route('/tenant_service_user_setting_add', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_add():
form = AddUserForm(None)
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
if form.validate_on_submit():
role_id = SaasRole.query.filter(SaasRole.name == form.role_list.data).first().id
db.session.add(SaasUser(id=None, name=form.user_name.data,
password=generate_password_hash(form.user_password.data),
role_id=role_id))
db.session.commit()
        flash(_('New user has been added.'))
return redirect(url_for('main.tenant_service_user_setting'))
elif request.method == 'GET':
form.creator.data = current_tenant_name
form.app_name.data = get_current_selected_app_name()
pass
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
# flash(_('Batch delete operation are not allowed now.'))
return render_template('tenant_service_user_setting.html', title=_('User List'), form=form,
tableName=_('Add User'), app_name_list=get_app_name_list(), addTitle=_('Add User'),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit,
isDelete=isDelete, tHead=tHead)
@bp.route('/tenant_service_user_setting_delete/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_delete(id):
if request.method == 'GET':
session['current_delete_id'] = id
else:
data = request.get_json()
name = data.get('name')
if name == 'execute':
current_data = SaasUser.query.filter(SaasUser.id == session['current_delete_id']).first()
db.session.delete(current_data)
db.session.commit()
            flash(_('Record has been deleted.'))
return jsonify({'result': 'success'})
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    confirmTitle = _('Confirm your choice:')
    confirmMessage = _('Do you want to delete this record?')
return render_template('tenant_service_user_setting.html', title=_('User List'),
tableName=_('User List'), app_name_list=get_app_name_list(),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data, SaasRole=SaasRole,
confirmTitle=confirmTitle, confirmMessage=confirmMessage)
@bp.route('/tenant_service_user_setting_delete_select', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_delete_select():
    flash(_('Batch delete operations are not allowed now.'))
return redirect(url_for('main.tenant_service_user_setting'))
@bp.route('/tenant_service_user_setting_edit/<id>', methods=['GET', 'POST'])
@login_required
def tenant_service_user_setting_edit(id):
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
if session.get('validate_user_name'):
form = AddUserForm(session['validate_user_name'])
else:
form = AddUserForm(None)
if form.validate_on_submit():
current_data = SaasUser.query.filter(SaasUser.id == id).first()
current_data.name = form.user_name.data
if not form.user_password.data.strip() == '':
current_data.password = generate_password_hash(form.user_password.data)
current_data.role_id = SaasRole.query.filter(SaasRole.name==form.role_list.data).first().id
db.session.commit()
        flash(_('User has been edited.'))
return redirect(url_for('main.tenant_service_user_setting'))
elif request.method == 'GET':
current_data = SaasUser.query.filter(SaasUser.id == id).first()
form.user_name.data = current_data.name
form.role_list.data = SaasRole.query.filter(SaasRole.id==current_data.role_id).first().name
form.creator.data = current_tenant_name
form.app_name.data = get_current_selected_app_name()
session['validate_user_name'] = form.user_name.data
isCheck = True
isEdit = True
isDelete = True
session['is_delete'] = 'false'
tHead = [_('User Name'), _('Belonged Role'), _('Creator'), _('App Name')]
data = SaasUser.query.order_by(db.asc(SaasUser.name)).all()
current_tenant_id = session['current_tenant_id']
current_tenant_name = Tenant.query.filter(Tenant.id == current_tenant_id).first().name
    return render_template('tenant_service_user_setting.html', title=_('User List'), form=form,
tableName=_('User List'), app_name_list=get_app_name_list(), editTitle=_('Edit User'),
current_selected_app_name=get_current_selected_app_name(),
isCheck=isCheck, isEdit=isEdit, current_tenant_name=current_tenant_name,
isDelete=isDelete, tHead=tHead, data=data)
# ---------------------------------------------------------------------------------------
# remote api service
# ---------------------------------------------------------------------------------------
HTTPMETHOD = {
'GET': "GET",
'POST': "POST",
'PUT': "PUT",
'DELETE': "DELETE",
'PATCH': "PATCH",
'OPTIONS': "OPTIONS",
'HEAD': "HEAD",
'TRACE': "TRACE",
'CONNECT': "CONNECT",
}
ErrMsgs = {
'FAILED': "Failed;",
'NOTFOUND': "Not found;",
'SUCCESS': "Success;",
'UNEXPECTED': "Something unexpected happened;",
'UNAUTHORIZED': "You are not authorized to do that;",
}
class ResponseBaseStruct():
Success = True
Errmsg = ErrMsgs['SUCCESS']
class ResponseStruct(ResponseBaseStruct):
Data = {}
HasMore = False
Next = ''
def obj2json(obj):
return {
"Success": obj.Success,
"Errmsg": obj.Errmsg,
"Data": obj.Data,
"HasMore": obj.HasMore,
"Next": obj.Next
}
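# obj2json is used as the 'default' hook for json.dumps below: it converts a
# ResponseStruct instance into a plain dict so it can be serialized.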
@bp.route('/funcData', methods=['GET', 'POST'])
def getFuncData():
# print(appID, tenantID, userName, accessToken)
form = request.form
appID = form['appID']
tenantID = form['tenantID']
data_json = None
dataFile = os.path.join(os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'],
appID), tenantID), 'version2function.json')
    if os.path.isfile(dataFile):
        with open(dataFile, 'r', encoding='utf-8') as f:
            data_json = json.load(f)
    rs = ResponseStruct()
    rs.Success = True
    rs.Errmsg = ErrMsgs['SUCCESS']
    rs.Data = {}
    rs.Data['ModTime'] = str(time.time())
    rs.Data['Info'] = []
    # Guard against a missing data file (data_json stays None in that case)
    for v in (data_json or []):
        print(v)
        rs.Data['Info'].append({'data': {}, 'id': 'role_' + v['id']})
print(rs.Data)
response = current_app.make_response((json.dumps(rs, default=obj2json), 200))
# response = current_app.make_response((data_json, '200', 'application/json'))
return response
@bp.route('/funcDataCheck', methods=['GET', 'POST'])
def funcDataCheck():
form = request.form
appID = form['appID']
tenantID = form['tenantID']
data_json = None
dataFile = os.path.join(os.path.join(os.path.join(
current_app.config['UPLOAD_FOLDERS']['tenant_service_customize_function'],
appID), tenantID), 'version2function.json')
    if os.path.isfile(dataFile):
        with open(dataFile, 'r', encoding='utf-8') as f:
            data_json = json.load(f)
    print(data_json)
response = current_app.make_response("success", 200)
return response
from RFEM.initModel import *
from RFEM.enums import *
class MemberSetLoad():
def __init__(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
magnitude: float = 0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
            load_direction (enum): Load Direction Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comments
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Member Sets No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
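    # Hedged usage sketch (assumes the client model was initialized via
    # RFEM.initModel beforehand):
    #   MemberSetLoad(no=1, load_case_no=1, member_sets='1',
    #                 load_direction=LoadDirectionType.LOAD_DIRECTION_LOCAL_Z,
    #                 magnitude=5000)  # uniform force on member set 1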
def Force(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
force_eccentricity: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameter
force_eccentricity (bool): Force Eccentricity Option
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_UNIFORM_TOTAL:
load_parameter = [magnitude]
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for LOAD_DISTRIBUTION_TRAPEZOIDAL:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_TAPERED:
load_parameter = [relative_distance_a = False, relative_distance_b = False,magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
for LOAD_DISTRIBUTION_VARYING_IN_Z:
load_parameter = [[distance, delta_distance, magnitude], ...]
params:
{'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_FORCE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_parameter == []:
raise Exception("WARNING: Load parameter cannot be empty. Kindly check list inputs completeness and correctness.")
else:
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM" or load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM_TOTAL":
if len(load_parameter) == 1:
clientObject.magnitude = load_parameter[0]
else:
raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
if len(load_parameter) == 3:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
if len(load_parameter) == 7:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
else:
raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
                if len(load_parameter[0]) != 3:
                    print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
if len(load_parameter) == 6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
if len(load_parameter)==6:
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
else:
raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
if len(load_parameter)==3:
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
else:
raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
                if len(load_parameter[0]) != 3:
                    print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING_IN_Z":
                if len(load_parameter[0]) != 3:
                    print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
        # Force Eccentricity
clientObject.has_force_eccentricity = force_eccentricity
if force_eccentricity == True:
            required_keys = ('eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment',
                             'eccentricity_section_middle', 'is_eccentricity_at_end_different_from_start',
                             'eccentricity_y_at_end', 'eccentricity_y_at_start',
                             'eccentricity_z_at_end', 'eccentricity_z_at_start')
            if not all(key in params for key in required_keys):
                raise Exception("WARNING: Params does not contain all the necessary parameters. Kindly check dictionary.")
params_ecc = {'eccentricity_horizontal_alignment': MemberSetLoadEccentricityHorizontalAlignment.ALIGN_NONE,
'eccentricity_vertical_alignment': MemberSetLoadEccentricityVerticalAlignment.ALIGN_NONE,
'eccentricity_section_middle': MemberSetLoadEccentricitySectionMiddle.LOAD_ECCENTRICITY_SECTION_MIDDLE_CENTER_OF_GRAVITY,
'is_eccentricity_at_end_different_from_start': False,
'eccentricity_y_at_end': 0.0,
'eccentricity_y_at_start': 0.0,
'eccentricity_z_at_end': 0.0,
'eccentricity_z_at_start': 0.0}
params_ecc.update(params)
if params_ecc['is_eccentricity_at_end_different_from_start'] == False:
clientObject.eccentricity_horizontal_alignment= params_ecc['eccentricity_horizontal_alignment'].name
clientObject.eccentricity_vertical_alignment= params_ecc['eccentricity_vertical_alignment'].name
clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle'].name
clientObject.eccentricity_y_at_end= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_y_at_start= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_z_at_end= params_ecc['eccentricity_z_at_start']
clientObject.eccentricity_z_at_start= params_ecc['eccentricity_z_at_start']
elif params_ecc['is_eccentricity_at_end_different_from_start'] == True:
                clientObject.eccentricity_horizontal_alignment = params_ecc['eccentricity_horizontal_alignment'].name
                clientObject.eccentricity_vertical_alignment = params_ecc['eccentricity_vertical_alignment'].name
                clientObject.eccentricity_section_middle = params_ecc['eccentricity_section_middle'].name
clientObject.eccentricity_y_at_end= params_ecc['eccentricity_y_at_end']
clientObject.eccentricity_y_at_start= params_ecc['eccentricity_y_at_start']
clientObject.eccentricity_z_at_end= params_ecc['eccentricity_z_at_end']
clientObject.eccentricity_z_at_start= params_ecc['eccentricity_z_at_start']
# Comment
clientObject.comment = comment
        # Adding optional parameters via dictionary; the eccentricity keys were
        # already applied above and are skipped here to avoid overwriting them
        eccentricity_keys = ('eccentricity_horizontal_alignment', 'eccentricity_vertical_alignment',
                             'eccentricity_section_middle', 'is_eccentricity_at_end_different_from_start',
                             'eccentricity_y_at_end', 'eccentricity_y_at_start',
                             'eccentricity_z_at_end', 'eccentricity_z_at_start')
        for key in params:
            if key not in eccentricity_keys:
                clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
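    # Usage sketch (illustrative only; the instance name `msl` and SI base units
    # N and m are assumptions, not part of this file): given `msl = MemberSetLoad()`
    # bound to an active clientModel, a trapezoidal force on member set 1 might be:
    #   msl.Force(no=1, load_case_no=1, member_sets='1',
    #             load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL,
    #             load_parameter=[False, False, 5000, 8000, 1.0, 2.0])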
def Moment(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction= MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
comment (str, optional): Comments
params (dict, optional): Parameters
for LOAD_DISTRIBUTION_UNIFORM:
                load_parameter = [magnitude]
for LOAD_DISTRIBUTION_CONCENTRATED_1:
load_parameter = [relative_distance = False, magnitude, distance_a]
for LOAD_DISTRIBUTION_CONCENTRATED_N:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude, count_n, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_2x2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, relative_distance_c = False, magnitude, distance_a, distance_b, distance_c]
for LOAD_DISTRIBUTION_CONCENTRATED_2:
load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
            for LOAD_DISTRIBUTION_TRAPEZOIDAL:
                load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
            for LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [relative_distance_a = False, relative_distance_b = False, magnitude_1, magnitude_2, distance_a, distance_b]
for LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_MOMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution= load_distribution.name
#Load Magnitude and Parameters
if load_distribution.name == "LOAD_DISTRIBUTION_UNIFORM":
            if len(load_parameter) != 1:
                raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_1":
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
if load_parameter[0] == False:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_absolute = load_parameter[2]
else:
clientObject.magnitude = load_parameter[1]
clientObject.distance_a_relative = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_N":
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude = load_parameter[2]
clientObject.count_n = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2x2":
            if len(load_parameter) != 7:
                raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.distance_c_is_defined_as_relative = load_parameter[2]
clientObject.magnitude = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
if load_parameter[2] == False:
clientObject.distance_c_absolute = load_parameter[6]
else:
clientObject.distance_c_relative = load_parameter[6]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_2":
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_CONCENTRATED_VARYING":
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution.name == "LOAD_DISTRIBUTION_TRAPEZOIDAL":
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_TAPERED":
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.distance_a_is_defined_as_relative = load_parameter[0]
clientObject.distance_b_is_defined_as_relative = load_parameter[1]
clientObject.magnitude_1 = load_parameter[2]
clientObject.magnitude_2 = load_parameter[3]
if load_parameter[0] == False:
clientObject.distance_a_absolute = load_parameter[4]
else:
clientObject.distance_a_relative = load_parameter[4]
if load_parameter[1] == False:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.distance_b_relative = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_PARABOLIC":
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Member Load Direction
clientObject.load_direction = load_direction.name
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
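    # Sketch of a possible call (the instance `msl` and SI units are assumptions):
    # a concentrated moment of 3 kNm placed 1.5 m along member set 1 follows the
    # LOAD_DISTRIBUTION_CONCENTRATED_1 layout [relative_distance, magnitude, distance_a]:
    #   msl.Moment(no=2, load_case_no=1, member_sets='1',
    #              load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1,
    #              load_parameter=[False, 3000, 1.5])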
def Mass(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
individual_mass_components: bool=False,
mass_components = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
            individual_mass_components (bool): Individual Mass Components Option
mass_components (list): Mass Components
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
clientObject.load_type = MemberSetLoadType.E_TYPE_MASS.name
# Member Load Distribution
clientObject.load_distribution= MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Individual Mass Components
        if not isinstance(individual_mass_components, bool):
            raise Exception("WARNING: Type of individual mass components should be bool. Kindly check inputs correctness.")
clientObject.individual_mass_components = individual_mass_components
# Mass magnitude
if individual_mass_components == False:
clientObject.mass_global = mass_components[0]
else:
clientObject.mass_x = mass_components[0]
clientObject.mass_y = mass_components[1]
clientObject.mass_z = mass_components[2]
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
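    # Possible usage (hedged; `msl` and kg units are assumptions): with individual
    # mass components enabled, mass_components maps to [mass_x, mass_y, mass_z]:
    #   msl.Mass(no=3, load_case_no=1, member_sets='1',
    #            individual_mass_components=True, mass_components=[100, 100, 50])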
def Temperature(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
                load_parameter = [tb, tt]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                for load_over_total_length: bool= False:
                    load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
                for load_over_total_length: bool= True:
                    load_parameter = [tb1, tb2, tt1, tt2]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [tb1, tb2, tt1, tt2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [tb1, tb2, tb3, tt1, tt2, tt3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
                load_parameter = [[distance, delta_distance, magnitude_t_t, magnitude_t_b], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 2:
                raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b = load_parameter[0]
clientObject.magnitude_t_t = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 8:
                raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_t_1 = load_parameter[2]
clientObject.magnitude_t_t_2 = load_parameter[3]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 8:
                raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_t_1 = load_parameter[2]
clientObject.magnitude_t_t_2 = load_parameter[3]
            if not isinstance(load_parameter[4], bool):
                raise Exception("WARNING: Type of load_parameter[4] should be bool. Kindly check inputs correctness.")
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
            if not isinstance(load_parameter[5], bool):
                raise Exception("WARNING: Type of load_parameter[5] should be bool. Kindly check inputs correctness.")
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_t_b_1 = load_parameter[0]
clientObject.magnitude_t_b_2 = load_parameter[1]
clientObject.magnitude_t_b_3 = load_parameter[2]
clientObject.magnitude_t_t_1 = load_parameter[3]
clientObject.magnitude_t_t_2 = load_parameter[4]
clientObject.magnitude_t_t_3 = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
            if len(load_parameter[0]) != 4:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = load_parameter[i][2]
mlvlp.magnitude_delta_t = load_parameter[i][3]
mlvlp.magnitude_t_t = load_parameter[i][2]
mlvlp.magnitude_t_b = load_parameter[i][3]
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
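    # Illustrative call (the instance `msl` is an assumption; temperature units
    # follow the model settings): for LOAD_DISTRIBUTION_UNIFORM the list maps to
    # [magnitude_t_b, magnitude_t_t], matching the assignments above:
    #   msl.Temperature(no=4, load_case_no=1, member_sets='1',
    #                   load_parameter=[18, 25])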
def TemperatureChange(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
                load_parameter = [delta_t, t_c]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                for load_over_total_length: bool= False:
                    load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
                for load_over_total_length: bool= True:
                    load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [delta_t_1, delta_t_2, t_c_1, t_c_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [delta_t_1, delta_t_2, delta_t_3, t_c_1, t_c_2, t_c_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
                load_parameter = [[distance, delta_distance, magnitude_t_c, magnitude_delta_t], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_TEMPERATURE_CHANGE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 2:
                raise Exception("WARNING: Load parameter array length should be 2 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t = load_parameter[0]
clientObject.magnitude_t_c = load_parameter[1]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 8:
                raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_t_c_1 = load_parameter[2]
clientObject.magnitude_t_c_2 = load_parameter[3]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 8:
                raise Exception("WARNING: Load parameter array length should be 8 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_t_c_1 = load_parameter[2]
clientObject.magnitude_t_c_2 = load_parameter[3]
if load_parameter[4] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[6]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[6]
if load_parameter[5] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[7]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[7]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_delta_t_1 = load_parameter[0]
clientObject.magnitude_delta_t_2 = load_parameter[1]
clientObject.magnitude_delta_t_3 = load_parameter[2]
clientObject.magnitude_t_c_1 = load_parameter[3]
clientObject.magnitude_t_c_2 = load_parameter[4]
clientObject.magnitude_t_c_3 = load_parameter[5]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
            if len(load_parameter[0]) != 4:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = load_parameter[i][2]
mlvlp.magnitude_delta_t = load_parameter[i][3]
mlvlp.magnitude_t_t = load_parameter[i][2]
mlvlp.magnitude_t_b = load_parameter[i][3]
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
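    # Illustrative call (assumed instance `msl`): for LOAD_DISTRIBUTION_UNIFORM the
    # list maps to [magnitude_delta_t, magnitude_t_c], matching the assignments above:
    #   msl.TemperatureChange(no=5, load_case_no=1, member_sets='1',
    #                         load_parameter=[10, 20])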
def AxialStrain(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [epsilon]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                load_parameter = [epsilon1, epsilon2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [epsilon1, epsilon2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
                load_parameter = [epsilon1, epsilon2, epsilon3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_AXIAL_STRAIN
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 1:
                raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
                    clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
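    # Illustrative call (assumed instance `msl`; strain is dimensionless): a uniform
    # axial strain of 0.001 over member set 1:
    #   msl.AxialStrain(no=6, load_case_no=1, member_sets='1',
    #                   load_parameter=[0.001])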
def AxialDisplacement(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
magnitude : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Set
load_direction (enum): Load Direction Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comments
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_AXIAL_DISPLACEMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
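    # Illustrative call (assumed instance `msl`; metres assumed): a uniform axial
    # displacement of 5 mm along the local x-axis:
    #   msl.AxialDisplacement(no=7, load_case_no=1, member_sets='1', magnitude=0.005)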
def Precamber(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
            load_parameter (list): Load Parameters
            load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PRECAMBER
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 1:
                raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
if load_over_total_length == False:
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
if load_parameter[2] == True:
clientObject.distance_a_is_defined_as_relative = True
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_is_defined_as_relative = False
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3] == True:
clientObject.distance_b_is_defined_as_relative = True
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_is_defined_as_relative = False
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution.name == "LOAD_DISTRIBUTION_VARYING":
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
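    # Illustrative call (assumed instance `msl`; metres assumed): a parabolic
    # precamber peaking at 20 mm uses the [magnitude_1, magnitude_2, magnitude_3] layout:
    #   msl.Precamber(no=8, load_case_no=1, member_sets='1',
    #                 load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC,
    #                 load_parameter=[0.0, 0.02, 0.0])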
def InitialPrestress(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X,
magnitude : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction (enum): Load Direction Enumeration
magnitude (float): Load Magnitude
comment (str, optional): Comment
params (dict, optional): Parameters
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_INITIAL_PRESTRESS
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
clientObject.magnitude = magnitude
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
# Add Load Member Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
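    # Illustrative call (assumed instance `msl`; Newtons assumed): an initial
    # prestress of 50 kN along the local x-axis:
    #   msl.InitialPrestress(no=9, load_case_no=1, member_sets='1', magnitude=50000)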
def Displacement(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length Option
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
                load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_a]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
                load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
                load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_c_is_defined_as_relative = False, distance_a, distance_b, distance_c]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
                load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
                load_parameter = [[distance, delta_distance, magnitude], ...]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
                load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
            for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
                load_parameter = [magnitude_1, magnitude_2, distance_a_relative = False, distance_b_relative = False, a_distance, b_distance]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
# Client model | Member Load
clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_DISPLACEMENT
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 1:
                raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[2]
else:
clientObject.distance_a_absolute = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
            if len(load_parameter) != 5:
                raise Exception("WARNING: Load parameter array length should be 5 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[3]
else:
clientObject.distance_a_absolute = load_parameter[3]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[4]
else:
clientObject.distance_b_absolute = load_parameter[4]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
            if len(load_parameter) != 7:
                raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
clientObject.distance_c_is_defined_as_relative = load_parameter[3]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
if load_parameter[3]:
clientObject.distance_c_relative = load_parameter[6]
else:
clientObject.distance_c_absolute = load_parameter[6]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %i, load case: %i - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
            if not load_over_total_length:
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
def Rotation(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
load_parameter = [],
load_over_total_length: bool= False,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_distribution (enum): Load Distribution Enumeration
load_direction (enum): Load Direction Enumeration
load_parameter (list): Load Parameters
load_over_total_length (bool): Load Over Total Length
comment (str, optional): Comment
params (dict, optional): Parameters
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
load_parameter = [magnitude]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
            load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_a]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
            load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
            load_parameter = [magnitude, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_c_is_defined_as_relative = False, distance_a, distance_b, distance_c]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
            load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
            load_parameter = [[distance, delta_distance, magnitude], ...]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
        for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            load_parameter = [magnitude_1, magnitude_2, distance_a_is_defined_as_relative = False, distance_b_is_defined_as_relative = False, distance_a, distance_b]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
load_parameter = [magnitude_1, magnitude_2, magnitude_3]
for load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
load_parameter = [[distance, delta_distance, magnitude], ...]
"""
        # Client model | Member Set Load
        clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_ROTATION
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = load_distribution.name
# Member Load Direction
clientObject.load_direction = load_direction.name
#Load Magnitude
if load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM:
            if len(load_parameter) != 1:
                raise Exception("WARNING: Load parameter array length should be 1 for LOAD_DISTRIBUTION_UNIFORM. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_1:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_CONCENTRATED_1. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[2]
else:
clientObject.distance_a_absolute = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_N:
            if len(load_parameter) != 5:
                raise Exception("WARNING: Load parameter array length should be 5 for LOAD_DISTRIBUTION_CONCENTRATED_N. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[3]
else:
clientObject.distance_a_absolute = load_parameter[3]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[4]
else:
clientObject.distance_b_absolute = load_parameter[4]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2x2:
            if len(load_parameter) != 7:
                raise Exception("WARNING: Load parameter array length should be 7 for LOAD_DISTRIBUTION_CONCENTRATED_2x2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude = load_parameter[0]
clientObject.distance_a_is_defined_as_relative = load_parameter[1]
clientObject.distance_b_is_defined_as_relative = load_parameter[2]
clientObject.distance_c_is_defined_as_relative = load_parameter[3]
if load_parameter[1]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[2]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
if load_parameter[3]:
clientObject.distance_c_relative = load_parameter[6]
else:
clientObject.distance_c_absolute = load_parameter[6]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_2:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_CONCENTRATED_2. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_CONCENTRATED_VARYING:
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TRAPEZOIDAL:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TRAPEZOIDAL. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
            if not isinstance(load_over_total_length, bool):
                raise Exception("WARNING: Type of the load over total length should be bool. Kindly check inputs correctness.")
            if not load_over_total_length:
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
else:
clientObject.load_is_over_total_length = True
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_TAPERED:
            if len(load_parameter) != 6:
                raise Exception("WARNING: Load parameter array length should be 6 for LOAD_DISTRIBUTION_TAPERED. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.distance_a_is_defined_as_relative = load_parameter[2]
clientObject.distance_b_is_defined_as_relative = load_parameter[3]
if load_parameter[2]:
clientObject.distance_a_relative = load_parameter[4]
else:
clientObject.distance_a_absolute = load_parameter[4]
if load_parameter[3]:
clientObject.distance_b_relative = load_parameter[5]
else:
clientObject.distance_b_absolute = load_parameter[5]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_PARABOLIC:
            if len(load_parameter) != 3:
                raise Exception("WARNING: Load parameter array length should be 3 for LOAD_DISTRIBUTION_PARABOLIC. Kindly check list inputs completeness and correctness.")
clientObject.magnitude_1 = load_parameter[0]
clientObject.magnitude_2 = load_parameter[1]
clientObject.magnitude_3 = load_parameter[2]
elif load_distribution == MemberSetLoadDistribution.LOAD_DISTRIBUTION_VARYING:
            if len(load_parameter[0]) != 3:
                print("WARNING: MemberSetLoad no: %d, load case: %d - Wrong data input." % (no, load_case_no))
clientObject.varying_load_parameters = clientModel.factory.create('ns0:member_set_load.varying_load_parameters')
for i in range(len(load_parameter)):
mlvlp = clientModel.factory.create('ns0:member_set_load_varying_load_parameters')
mlvlp.no = i+1
mlvlp.distance = load_parameter[i][0]
mlvlp.delta_distance = load_parameter[i][1]
mlvlp.magnitude = load_parameter[i][2]
mlvlp.note = None
mlvlp.magnitude_t_c = 0.0
mlvlp.magnitude_delta_t = 0.0
mlvlp.magnitude_t_t = 0.0
mlvlp.magnitude_t_b = 0.0
clientObject.varying_load_parameters.member_set_load_varying_load_parameters.append(mlvlp)
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
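    # Usage sketch for Rotation (values are hypothetical; referring to the
    # enclosing class as 'MemberSetLoad' is an assumption based on this
    # module's conventions):
    #   MemberSetLoad.Rotation(no=1, load_case_no=1, member_sets='1',
    #                          load_distribution=MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM,
    #                          load_direction=MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_Z,
    #                          load_parameter=[0.05])  # [magnitude] per the docstring above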
def PipeContentFull(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction_orientation = MemberSetLoadDirectionOrientation.LOAD_DIRECTION_FORWARD,
specific_weight : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction_orientation (enum): Load Direction Orientation Enumeration
specific_weight (float): Specific Weight
comment (str, optional): Comment
params (dict, optional): Parameters
"""
        # Client model | Member Set Load
        clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_CONTENT_FULL
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_GLOBAL_Z_OR_USER_DEFINED_W_TRUE.name
#Member Load Orientation
clientObject.load_direction_orientation = load_direction_orientation.name
#Load Magnitude
clientObject.magnitude = specific_weight
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
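    # Usage sketch for PipeContentFull (hypothetical values; SI units assumed,
    # i.e. specific_weight in N/m^3):
    #   MemberSetLoad.PipeContentFull(no=2, load_case_no=1, member_sets='1',
    #                                 specific_weight=10000.0)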
def PipeContentPartial(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
load_direction_orientation = MemberSetLoadDirectionOrientation.LOAD_DIRECTION_FORWARD,
specific_weight : float = 0.0,
filling_height : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
load_direction_orientation (enum): Load Direction Orientation Enumeration
specific_weight (float): Specific Weight
filling_height (float): Filling Height
comment (str, optional): Comment
params (dict, optional): Parameters
"""
        # Client model | Member Set Load
        clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_CONTENT_PARTIAL
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_GLOBAL_Z_OR_USER_DEFINED_W_TRUE.name
#Member Load Orientation
clientObject.load_direction_orientation = load_direction_orientation.name
#Load Magnitude
clientObject.magnitude = specific_weight
#Filling Height
clientObject.filling_height = filling_height
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
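    # Usage sketch for PipeContentPartial (hypothetical values; SI units
    # assumed, i.e. specific_weight in N/m^3 and filling_height in m):
    #   MemberSetLoad.PipeContentPartial(no=3, load_case_no=1, member_sets='1',
    #                                    specific_weight=10000.0, filling_height=0.15)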
def PipeInternalPressure(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
pressure : float = 0.0,
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
pressure (float): Pressure
comment (str, optional): Comment
params (dict, optional): Parameters
"""
        # Client model | Member Set Load
        clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_PIPE_INTERNAL_PRESSURE
clientObject.load_type = load_type.name
# Member Load Distribution
clientObject.load_distribution = MemberSetLoadDistribution.LOAD_DISTRIBUTION_UNIFORM.name
# Member Load Direction
clientObject.load_direction = MemberSetLoadDirection.LOAD_DIRECTION_LOCAL_X.name
#Load Magnitude
clientObject.magnitude = pressure
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
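    # Usage sketch for PipeInternalPressure (hypothetical values; SI units
    # assumed, i.e. pressure in Pa):
    #   MemberSetLoad.PipeInternalPressure(no=4, load_case_no=1, member_sets='1',
    #                                      pressure=500000.0)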
def RotaryMotion(self,
no: int = 1,
load_case_no: int = 1,
member_sets: str = '1',
angular_acceleration : float = 0.0,
angular_velocity : float = 0.0,
axis_definition_type = MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_TWO_POINTS,
axis_orientation = MemberSetLoadAxisDefinitionAxisOrientation.AXIS_POSITIVE,
axis_definition = MemberSetLoadAxisDefinition.AXIS_X,
axis_definition_p1 = [],
axis_definition_p2 = [],
comment: str = '',
params: dict = {}):
"""
Args:
no (int): Load Tag
load_case_no (int): Assigned Load Case
member_sets (str): Assigned Member Sets
angular_acceleration (float): Angular Acceleration
angular_velocity (float): Angular Velocity
axis_definition_type (enum): Axis Definition Type Enumeration
axis_orientation (enum): Axis Orientation Enumeration
axis_definition (enum): Axis Definition Enumeration
axis_definition_p1 (list):Axis Definition First Point
axis_definition_p2 (list): Axis Definition Second Point
comment (str, optional): Comment
params (dict, optional): Parameters
"""
        # Client model | Member Set Load
        clientObject = clientModel.factory.create('ns0:member_set_load')
        # Clears object attributes | Sets all attributes to None
clearAtributes(clientObject)
# Member Load No.
clientObject.no = no
# Load Case No.
clientObject.load_case = load_case_no
# Members No. (e.g. '5 6 7 12')
clientObject.member_sets = ConvertToDlString(member_sets)
# Member Load Type
load_type = MemberSetLoadType.LOAD_TYPE_ROTARY_MOTION
clientObject.load_type = load_type.name
#Angular Acceleration
clientObject.angular_acceleration = angular_acceleration
#Angular Velocity
clientObject.angular_velocity = angular_velocity
#Axis Definition Type
clientObject.axis_definition_type = axis_definition_type.name
        # Axis Definition
        if axis_definition_type == MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_TWO_POINTS:
clientObject.axis_definition_p1_x = axis_definition_p1[0]
clientObject.axis_definition_p1_y = axis_definition_p1[1]
clientObject.axis_definition_p1_z = axis_definition_p1[2]
clientObject.axis_definition_p2_x = axis_definition_p2[0]
clientObject.axis_definition_p2_y = axis_definition_p2[1]
clientObject.axis_definition_p2_z = axis_definition_p2[2]
        elif axis_definition_type == MemberSetLoadAxisDefinitionType.AXIS_DEFINITION_POINT_AND_AXIS:
clientObject.axis_definition_p1_x = axis_definition_p1[0]
clientObject.axis_definition_p1_y = axis_definition_p1[1]
clientObject.axis_definition_p1_z = axis_definition_p1[2]
clientObject.axis_definition_axis = axis_definition.name
clientObject.axis_definition_axis_orientation = axis_orientation.name
# Comment
clientObject.comment = comment
# Adding optional parameters via dictionary
for key in params:
clientObject[key] = params[key]
        # Add Member Set Load to client model
clientModel.service.set_member_set_load(load_case_no, clientObject)
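    # Usage sketch for RotaryMotion (hypothetical values). With the default
    # AXIS_DEFINITION_TWO_POINTS, the rotation axis is given by two [x, y, z]
    # points:
    #   MemberSetLoad.RotaryMotion(no=5, load_case_no=1, member_sets='1',
    #                              angular_acceleration=0.0, angular_velocity=2.5,
    #                              axis_definition_p1=[0, 0, 0],
    #                              axis_definition_p2=[0, 0, 1])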
from abc import abstractmethod
from .base import OperatorConverter
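# The classes below are schema stubs: each records the TorchScript
# signature(s) of one ATen operator in its docstring and leaves parse()
# abstract. A concrete converter would subclass a stub and implement parse();
# a minimal sketch (class name and body are hypothetical, not part of this
# file):
#
#   class ATenAsTensorOperator(ATenAsTensorSchema):
#       def parse(self, node, attrs, args, graph_converter):
#           # translate the aten::as_tensor node into the target graph here
#           ...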
class ATenPackSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_pack_sequence(Tensor output, Tensor batch_sizes, Tensor? sorted_indices, Tensor? unsorted_indices) -> (Tensor, Tensor, Tensor?, Tensor?)'''
pass
class ATenAsTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::as_tensor(Tensor(a) data, *, int? dtype=None, Device? device=None) -> (Tensor(a|b))'''
pass
class ATenUpsampleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__upsample(Tensor input, int? size=None, int? scale_factor=None, str mode="nearest", bool? align_corners=None) -> (Tensor)
aten::__upsample.size_list(Tensor input, int[]? size=None, int? scale_factor=None, str mode="nearest", bool? align_corners=None) -> (Tensor)'''
pass
class ATenHspmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hspmm(Tensor mat1, Tensor mat2) -> (Tensor)'''
pass
class ATenValuesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::values(Tensor(a) self) -> (Tensor(a))
aten::_values(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::indices(Tensor(a) self) -> (Tensor(a))
aten::_indices(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenNativeNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::native_norm(Tensor self, Scalar p=2) -> (Tensor)
aten::native_norm.ScalarOpt_dim_dtype(Tensor self, Scalar? p, int[1] dim, bool keepdim, int? dtype) -> (Tensor)'''
pass
class ATenQuantizedMaxPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_max_pool1d(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], int[1] dilation=[1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenToDenseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::to_dense(Tensor self, int? dtype=None) -> (Tensor)'''
pass
class ATenFlattenDenseTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flatten_dense_tensors(Tensor[] tensors) -> (Tensor)'''
pass
class ATenLinalgMatrixRankSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_matrix_rank(Tensor self, float? tol=None, bool hermitian=False) -> (Tensor)
aten::linalg_matrix_rank.tol_tensor(Tensor input, Tensor tol, bool hermitian=False) -> (Tensor)'''
pass
class ATenLinalgTensorinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_tensorinv(Tensor self, int ind=2) -> (Tensor)'''
pass
class ATenLinalgPinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_pinv(Tensor self, float rcond=1.0000000000000001e-15, bool hermitian=False) -> (Tensor)
aten::linalg_pinv.rcond_tensor(Tensor self, Tensor rcond, bool hermitian=False) -> (Tensor)'''
pass
class ATenLinalgCondSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_cond(Tensor self, Scalar? p=None) -> (Tensor)
aten::linalg_cond.p_str(Tensor self, str p) -> (Tensor)'''
pass
class ATenLinalgSvdvalsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_svdvals(Tensor input) -> (Tensor)'''
pass
class ATenLinalgSvdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_svd.U(Tensor self, bool full_matrices=True, *, Tensor(a!) U, Tensor(b!) S, Tensor(c!) Vh) -> (Tensor(a!) U, Tensor(b!) S, Tensor(c!) Vh)
aten::linalg_svd(Tensor self, bool full_matrices=True) -> (Tensor U, Tensor S, Tensor Vh)'''
pass
class ATenInnerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::inner(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLinalgInvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_inv(Tensor self) -> (Tensor)'''
pass
class ATenLinalgEigvalshSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigvalsh(Tensor self, str UPLO="L") -> (Tensor)'''
pass
class ATenLinalgEigvalsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigvals(Tensor self) -> (Tensor)'''
pass
class ATenLinalgCholeskySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_cholesky(Tensor self) -> (Tensor)'''
pass
class ATenFftIfftshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifftshift(Tensor self, int[1]? dim=None) -> (Tensor)'''
pass
class ATenFftFftshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fftshift(Tensor self, int[1]? dim=None) -> (Tensor)'''
pass
class ATenFftIrfftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenFftRfftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenFftIrfft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftRfft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftFft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenFftIhfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ihfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftHfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_hfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftIrfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_irfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftRfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_rfft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFftIfftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenSlowConv3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv3d(Tensor self, Tensor weight, int[3] kernel_size, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0]) -> (Tensor)'''
pass
class ATenThnnConvDepthwise2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::thnn_conv_depthwise2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] dilation=[1, 1]) -> (Tensor)'''
pass
class ATenThnnConv2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::thnn_conv2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0]) -> (Tensor)'''
pass
class ATenLogSigmoidSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log_sigmoid(Tensor self) -> (Tensor)'''
pass
class ATenFloatPowerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::float_power.Tensor_Tensor(Tensor self, Tensor exponent) -> (Tensor)
aten::float_power.Scalar(Scalar self, Tensor exponent) -> (Tensor)
aten::float_power.Tensor_Scalar(Tensor self, Scalar exponent) -> (Tensor)'''
pass
class ATenArgsortSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::argsort(Tensor self, int dim=-1, bool descending=False) -> (Tensor)
aten::argsort.dimname(Tensor self, str dim, bool descending=False) -> (Tensor)'''
pass
class ATenMsortSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::msort(Tensor self) -> (Tensor)'''
pass
class ATenNanquantileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nanquantile.scalar(Tensor self, float q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::nanquantile(Tensor self, Tensor q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::nanquantile.new_scalar(Tensor self, float q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)
aten::nanquantile.new(Tensor self, Tensor q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)'''
pass
class ATenQuantileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantile.scalar(Tensor self, float q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::quantile(Tensor self, Tensor q, int? dim=None, bool keepdim=False) -> (Tensor)
aten::quantile.new_scalar(Tensor self, float q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)
aten::quantile.new(Tensor self, Tensor q, int? dim, bool keepdim, *, str interpolation) -> (Tensor)'''
pass
class ATenQrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::qr.Q(Tensor self, bool some=True, *, Tensor(a!) Q, Tensor(b!) R) -> (Tensor(a!) Q, Tensor(b!) R)
aten::qr(Tensor self, bool some=True) -> (Tensor Q, Tensor R)'''
pass
class ATenSvdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::svd.U(Tensor self, bool some=True, bool compute_uv=True, *, Tensor(a!) U, Tensor(b!) S, Tensor(c!) V) -> (Tensor(a!) U, Tensor(b!) S, Tensor(c!) V)
aten::svd(Tensor self, bool some=True, bool compute_uv=True) -> (Tensor U, Tensor S, Tensor V)'''
pass
class ATenCrossEntropyLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cross_entropy_loss(Tensor self, Tensor target, Tensor? weight=None, int reduction=1, int ignore_index=-100) -> (Tensor)'''
pass
class ATenNonzeroNumpySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nonzero_numpy(Tensor self) -> (Tensor[])'''
pass
class ATenTakeAlongDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::take_along_dim(Tensor self, Tensor indices, int? dim=None) -> (Tensor)'''
pass
class ATenScatterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::scatter.src(Tensor self, int dim, Tensor index, Tensor src) -> (Tensor)
aten::scatter.value(Tensor self, int dim, Tensor index, Scalar value) -> (Tensor)
aten::scatter.dimname_src(Tensor self, str dim, Tensor index, Tensor src) -> (Tensor)
aten::scatter.dimname_value(Tensor self, str dim, Tensor index, Scalar value) -> (Tensor)'''
pass
class ATenIndexAddSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_add(Tensor self, int dim, Tensor index, Tensor source) -> (Tensor)
aten::index_add.alpha(Tensor self, int dim, Tensor index, Tensor source, *, Scalar alpha) -> (Tensor)
aten::index_add.dimname(Tensor self, str dim, Tensor index, Tensor source, *, Scalar alpha=1) -> (Tensor)'''
pass
class ATenPutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::put(Tensor self, Tensor index, Tensor source, bool accumulate=False) -> (Tensor)'''
pass
class ATenMaskedScatterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::masked_scatter(Tensor self, Tensor mask, Tensor source) -> (Tensor)'''
pass
class ATenQuantizedRnnReluCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_rnn_relu_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenQuantizedGruCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_gru_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenQuantizedLstmCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_lstm_cell(Tensor input, Tensor[] hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor, Tensor)'''
pass
class ATenRnnReluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_relu.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::rnn_relu.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenRnnTanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_tanh.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::rnn_tanh.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenGruSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gru.input(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::gru.data(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenLstmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lstm.input(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor, Tensor)
aten::lstm.data(Tensor data, Tensor batch_sizes, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor, Tensor)'''
pass
class ATenPadPackedSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_pad_packed_sequence(Tensor data, Tensor batch_sizes, bool batch_first, Scalar padding_value, int total_length) -> (Tensor, Tensor)'''
pass
class ATenCombinationsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::combinations(Tensor self, int r=2, bool with_replacement=False) -> (Tensor)'''
pass
class ATenCartesianProdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cartesian_prod(Tensor[] tensors) -> (Tensor)'''
pass
class ATenMeshgridSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::meshgrid(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenMaskedScaleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_masked_scale(Tensor self, Tensor mask, float scale) -> (Tensor)'''
pass
class ATenFakeQuantizePerChannelAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_channel_affine(Tensor self, Tensor scale, Tensor zero_point, int axis, int quant_min, int quant_max) -> (Tensor)'''
pass
class ATenFakeQuantizePerTensorAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_tensor_affine(Tensor self, float scale, int zero_point, int quant_min, int quant_max) -> (Tensor)'''
pass
class ATenCoalesceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::coalesce(Tensor(a) self) -> (Tensor(a))
aten::_coalesce(Tensor self) -> (Tensor)'''
pass
class ATenWeightNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_weight_norm(Tensor v, Tensor g, int dim=0) -> (Tensor)'''
pass
class ATenNormExceptDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::norm_except_dim(Tensor v, int pow=2, int dim=0) -> (Tensor)'''
pass
class ATenWhereSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::where.self(Tensor condition, Tensor self, Tensor other) -> (Tensor)
aten::where.ScalarSelf(Tensor condition, Scalar self, Tensor other) -> (Tensor)
aten::where.ScalarOther(Tensor condition, Tensor self, Scalar other) -> (Tensor)
aten::where.Scalar(Tensor condition, Scalar self, Scalar other) -> (Tensor)
aten::where(Tensor condition) -> (Tensor[])'''
pass
class ATenTypeAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::type_as(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFlipudSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flipud(Tensor self) -> (Tensor)'''
pass
class ATenFliplrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fliplr(Tensor self) -> (Tensor)'''
pass
class ATenOneHotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::one_hot(Tensor self, int num_classes=-1) -> (Tensor)'''
pass
class ATenTileSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tile(Tensor self, int[] dims) -> (Tensor)'''
pass
class ATenSumToSizeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sum_to_size(Tensor self, int[] size) -> (Tensor)'''
pass
class ATenIstftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::istft(Tensor self, int n_fft, int? hop_length=None, int? win_length=None, Tensor? window=None, bool center=True, bool normalized=False, bool? onesided=None, int? length=None, bool return_complex=False) -> (Tensor)'''
pass
class ATenStftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::stft(Tensor self, int n_fft, int? hop_length=None, int? win_length=None, Tensor? window=None, bool normalized=False, bool? onesided=None, bool? return_complex=None) -> (Tensor)'''
pass
class ATenDstackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dstack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenHstackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hstack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenDsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::dsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenVsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::vsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::vsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenHsplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hsplit.int(Tensor(a) self, int sections) -> (Tensor[])
aten::hsplit.array(Tensor(a) self, int[] indices) -> (Tensor[])'''
pass
class ATenSmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::smm(Tensor self, Tensor mat2) -> (Tensor)'''
pass
class ATenSeluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::selu(Tensor self) -> (Tensor)'''
pass
class ATenRreluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rrelu(Tensor self, Scalar lower=0.125, Scalar upper=0.33333333333333331, bool training=False, Generator? generator=None) -> (Tensor)'''
pass
class ATenRavelSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ravel(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenPinverseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pinverse(Tensor self, float rcond=1.0000000000000001e-15) -> (Tensor)'''
pass
class ATenPinMemorySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pin_memory(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenPixelUnshuffleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pixel_unshuffle(Tensor self, int downscale_factor) -> (Tensor)'''
pass
class ATenPixelShuffleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pixel_shuffle(Tensor self, int upscale_factor) -> (Tensor)'''
pass
class ATenPairwiseDistanceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pairwise_distance(Tensor x1, Tensor x2, float p=2., float eps=9.9999999999999995e-07, bool keepdim=False) -> (Tensor)'''
pass
class ATenMatrixRankSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::matrix_rank.tol(Tensor self, float tol, bool symmetric=False) -> (Tensor)
aten::matrix_rank(Tensor self, bool symmetric=False) -> (Tensor)'''
pass
class ATenKronSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::kron(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenInstanceNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::instance_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool use_input_stats, float momentum, float eps, bool cudnn_enabled) -> (Tensor)'''
pass
class ATenIndexCopySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_copy(Tensor self, int dim, Tensor index, Tensor source) -> (Tensor)
aten::index_copy.dimname(Tensor self, str dim, Tensor index, Tensor source) -> (Tensor)'''
pass
class ATenLdexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ldexp.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenEmbeddingBagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::embedding_bag(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq=False, int mode=0, bool sparse=False, Tensor? per_sample_weights=None, bool include_last_offset=False) -> (Tensor, Tensor, Tensor, Tensor)
aten::embedding_bag.padding_idx(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq, int mode, bool sparse, Tensor? per_sample_weights, bool include_last_offset, int? padding_idx) -> (Tensor, Tensor, Tensor, Tensor)
aten::_embedding_bag(Tensor weight, Tensor indices, Tensor offsets, bool scale_grad_by_freq=False, int mode=0, bool sparse=False, Tensor? per_sample_weights=None, bool include_last_offset=False, int padding_idx=-1) -> (Tensor, Tensor, Tensor, Tensor)'''
pass
class ATenEinsumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::einsum(str equation, Tensor[] tensors) -> (Tensor)'''
pass
class ATenDiffSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diff(Tensor self, int n=1, int dim=-1, Tensor? prepend=None, Tensor? append=None) -> (Tensor)'''
pass
class ATenDiagflatSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diagflat(Tensor self, int offset=0) -> (Tensor)'''
pass
class ATenDiagEmbedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diag_embed(Tensor self, int offset=0, int dim1=-2, int dim2=-1) -> (Tensor)'''
pass
class ATenCtcLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ctc_loss.IntList(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, int reduction=1, bool zero_infinity=False) -> (Tensor)
aten::ctc_loss.Tensor(Tensor log_probs, Tensor targets, Tensor input_lengths, Tensor target_lengths, int blank=0, int reduction=1, bool zero_infinity=False) -> (Tensor)
aten::_ctc_loss(Tensor log_probs, Tensor targets, int[] input_lengths, int[] target_lengths, int blank=0, bool zero_infinity=False) -> (Tensor, Tensor)'''
pass
class ATenConvolutionModeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_convolution_mode(Tensor input, Tensor weight, Tensor? bias, int[] stride, str padding, int[] dilation, int groups) -> (Tensor)'''
pass
class ATenCpuSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cpu(Tensor(a) self) -> (Tensor(a|b))'''
pass
class ATenBlockDiagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::block_diag(Tensor[] tensors) -> (Tensor)'''
pass
class ATenBroadcastToSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::broadcast_to(Tensor(a) self, int[] size) -> (Tensor(a))'''
pass
class ATenBroadcastTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::broadcast_tensors(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenBatchNormImplIndexSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_batch_norm_impl_index(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps, bool cudnn_enabled) -> (Tensor, Tensor, Tensor, Tensor, int)'''
pass
class ATenBatchNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps, bool cudnn_enabled) -> (Tensor)'''
pass
class ATenAtleast3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_3d(Tensor self) -> (Tensor)
aten::atleast_3d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenAtleast2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_2d(Tensor self) -> (Tensor)
aten::atleast_2d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenAtleast1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atleast_1d(Tensor self) -> (Tensor)
aten::atleast_1d.Sequence(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenDimArangeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_dim_arange(Tensor like, int dim) -> (Tensor)'''
pass
class ATenBatchNormStatsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_stats(Tensor input, float eps) -> (Tensor, Tensor)'''
pass
class ATenCopyFromSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_copy_from(Tensor self, Tensor dst, bool non_blocking=False) -> (Tensor)'''
pass
class ATenAdaptiveMaxPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_max_pool1d(Tensor self, int[1] output_size) -> (Tensor, Tensor)'''
pass
class ATenAdaptiveAvgPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_avg_pool1d(Tensor self, int[1] output_size) -> (Tensor)'''
pass
class ATenCrowIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::crow_indices(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenAvgPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::avg_pool1d(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], bool ceil_mode=False, bool count_include_pad=True) -> (Tensor)'''
pass
class ATenFeatureAlphaDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::feature_alpha_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenBatchNormElemtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_elemt(Tensor input, Tensor? weight, Tensor? bias, Tensor mean, Tensor invstd, float eps) -> (Tensor)'''
pass
class ATenAlphaDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::alpha_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenFeatureDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::feature_dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenShapeAsTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_shape_as_tensor(Tensor self) -> (Tensor)'''
pass
class ATenQuantizedRnnTanhCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_rnn_tanh_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor b_ih, Tensor b_hh, Tensor packed_ih, Tensor packed_hh, Tensor col_offsets_ih, Tensor col_offsets_hh, Scalar scale_ih, Scalar scale_hh, Scalar zero_point_ih, Scalar zero_point_hh) -> (Tensor)'''
pass
class ATenReshapeFromTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_reshape_from_tensor(Tensor self, Tensor shape) -> (Tensor)'''
pass
class ATenSobolEngineDrawSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_sobol_engine_draw(Tensor quasi, int n, Tensor sobolstate, int dimension, int num_generated, int? dtype) -> (Tensor, Tensor)'''
pass
class ATenLinalgQrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_qr.out(Tensor self, str mode="reduced", *, Tensor(a!) Q, Tensor(b!) R) -> (Tensor(a!) Q, Tensor(b!) R)
aten::linalg_qr(Tensor self, str mode="reduced") -> (Tensor Q, Tensor R)'''
pass
class ATenLinalgInvExSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_inv_ex.inverse(Tensor self, *, bool check_errors=False, Tensor(a!) inverse, Tensor(b!) info) -> (Tensor(a!) inverse, Tensor(b!) info)
aten::linalg_inv_ex(Tensor self, *, bool check_errors=False) -> (Tensor inverse, Tensor info)'''
pass
class ATenLinalgEighSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eigh.eigvals(Tensor self, str UPLO="L", *, Tensor(a!) eigvals, Tensor(b!) eigvecs) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::linalg_eigh(Tensor self, str UPLO="L") -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenLuSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lu_solve(Tensor self, Tensor LU_data, Tensor LU_pivots) -> (Tensor)'''
pass
class ATenSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::solve.solution(Tensor self, Tensor A, *, Tensor(a!) solution, Tensor(b!) lu) -> (Tensor(a!) solution, Tensor(b!) LU)
aten::solve(Tensor self, Tensor A) -> (Tensor solution, Tensor LU)'''
pass
class ATenCholeskySolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cholesky_solve(Tensor self, Tensor input2, bool upper=False) -> (Tensor)'''
pass
class ATenEigSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::eig.e(Tensor self, bool eigenvectors=False, *, Tensor(a!) e, Tensor(b!) v) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::eig(Tensor self, bool eigenvectors=False) -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenSymeigSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::symeig.e(Tensor self, bool eigenvectors=False, bool upper=True, *, Tensor(a!) e, Tensor(b!) V) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::symeig(Tensor self, bool eigenvectors=False, bool upper=True) -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenChooseQparamsOptimizedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::choose_qparams_optimized(Tensor input, int numel, int n_bins, float ratio, int bit_width) -> (Tensor, Tensor)'''
pass
class ATenPackPaddedSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_pack_padded_sequence(Tensor input, Tensor lengths, bool batch_first) -> (Tensor, Tensor)'''
pass
class ATenFftIfft2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifft2(Tensor self, int[1]? s=None, int[1] dim=[-2, -1], str? norm=None) -> (Tensor)'''
pass
class ATenUnsafeViewSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_unsafe_view(Tensor self, int[] size) -> (Tensor)'''
pass
class ATenPadSequenceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pad_sequence(Tensor[] sequences, bool batch_first=False, float padding_value=0.) -> (Tensor)'''
pass
class ATenTrilinearSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_trilinear(Tensor i1, Tensor i2, Tensor i3, int[] expand1, int[] expand2, int[] expand3, int[] sumdim, int unroll_dim=1) -> (Tensor)'''
pass
class ATenRot90Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rot90(Tensor self, int k=1, int[] dims=[0, 1]) -> (Tensor)'''
pass
class ATenSlogdetSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slogdet(Tensor self) -> (Tensor sign, Tensor logabsdet)'''
pass
class ATenCeluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::celu(Tensor self, Scalar alpha=1.) -> (Tensor)'''
pass
class ATenRepeatSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::repeat(Tensor self, int[] repeats) -> (Tensor)'''
pass
class ATenEuclideanDistSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_euclidean_dist(Tensor x1, Tensor x2) -> (Tensor)'''
pass
class ATenMvlgammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mvlgamma(Tensor self, int p) -> (Tensor)'''
pass
class ATenLogdetSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logdet(Tensor self) -> (Tensor)'''
pass
class ATenInverseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::inverse(Tensor self) -> (Tensor)'''
pass
class ATenGridSampler2dCpuFallbackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_grid_sampler_2d_cpu_fallback(Tensor input, Tensor grid, int interpolation_mode, int padding_mode, bool align_corners) -> (Tensor)'''
pass
class ATenEmbeddingSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::embedding(Tensor weight, Tensor indices, int padding_idx=-1, bool scale_grad_by_freq=False, bool sparse=False) -> (Tensor)'''
pass
class ATenUnpackDualSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_unpack_dual(Tensor(a) dual, int level) -> (Tensor(a) primal, Tensor tangent)'''
pass
class ATenConvolutionBackwardOverrideableSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::convolution_backward_overrideable(Tensor grad_output, Tensor input, Tensor weight, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding, int groups, bool[3] output_mask) -> (Tensor grad_input, Tensor grad_weight, Tensor grad_bias)'''
pass
class ATenMakeDualSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_make_dual(Tensor(a) primal, Tensor tangent, int level) -> (Tensor(a))'''
pass
class ATenConvolutionOverrideableSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::convolution_overrideable(Tensor input, Tensor weight, Tensor? bias, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding, int groups) -> (Tensor)'''
pass
class ATenConstantPadNdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::constant_pad_nd(Tensor self, int[] pad, Scalar value=0) -> (Tensor)'''
pass
class ATenAffineGridGeneratorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::affine_grid_generator(Tensor theta, int[] size, bool align_corners) -> (Tensor)'''
pass
class ATenSegmentReduceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::segment_reduce(Tensor data, str reduce, *, Tensor? lengths=None, Tensor? indices=None, int axis=0, bool unsafe=False, Scalar? initial=None) -> (Tensor)'''
pass
class ATenLinalgQrHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_linalg_qr_helper(Tensor self, str mode) -> (Tensor, Tensor)'''
pass
class ATenXorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__xor__.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::__xor__.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLinalgEigSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_eig.out(Tensor self, *, Tensor(a!) eigenvalues, Tensor(b!) eigenvectors) -> (Tensor(a!) eigenvalues, Tensor(b!) eigenvectors)
aten::linalg_eig(Tensor self) -> (Tensor eigenvalues, Tensor eigenvectors)'''
pass
class ATenOrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__or__.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::__or__.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLinalgLstsqSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_lstsq.out(Tensor self, Tensor b, float? rcond=None, *, str? driver=None, Tensor(a!) solution, Tensor(b!) residuals, Tensor(c!) rank, Tensor(d!) singular_values) -> (Tensor(a!) solution, Tensor(b!) residuals, Tensor(c!) rank, Tensor(d!) singular_values)
aten::linalg_lstsq(Tensor self, Tensor b, float? rcond=None, *, str? driver=None) -> (Tensor solution, Tensor residuals, Tensor rank, Tensor singular_values)'''
pass
class ATenSpecialXlog1pySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_xlog1py(Tensor self, Tensor other) -> (Tensor)
aten::special_xlog1py.self_scalar(Scalar self, Tensor other) -> (Tensor)
aten::special_xlog1py.other_scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenSpecialEntrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_entr(Tensor self) -> (Tensor)'''
pass
class ATenSlowConvDilated3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv_dilated3d(Tensor self, Tensor weight, int[3] kernel_size, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0], int[3] dilation=[1, 1, 1]) -> (Tensor)'''
pass
class ATenSlowConvDilated2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv_dilated2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] dilation=[1, 1]) -> (Tensor)'''
pass
class ATenSlowConvTranspose3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv_transpose3d(Tensor self, Tensor weight, int[3] kernel_size, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0], int[3] output_padding=[0, 0, 0], int[3] dilation=[1, 1, 1]) -> (Tensor)'''
pass
class ATenSlowConvTranspose2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slow_conv_transpose2d(Tensor self, Tensor weight, int[2] kernel_size, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] output_padding=[0, 0], int[2] dilation=[1, 1]) -> (Tensor)'''
pass
class ATenAndSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__and__.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::__and__.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenUpsampleNearest2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_nearest2d(Tensor self, int[2] output_size, float? scales_h=None, float? scales_w=None) -> (Tensor)
        aten::upsample_nearest2d.vec(Tensor input, int[]? output_size, float[]? scale_factors) -> (Tensor)'''
pass
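# Hedged example (illustrative sketch, not part of the generated schema dump): the
# aten::upsample_nearest2d overloads above are what a 4D nearest-neighbour
# F.interpolate call typically traces to. This helper is hypothetical and unused
# by the converter.
def _example_upsample_nearest2d():
    import torch
    import torch.nn.functional as F

    x = torch.rand(1, 3, 4, 4)
    # Doubling the spatial size; traces to the .vec overload with scale_factors
    y = F.interpolate(x, scale_factor=2.0, mode='nearest')
    return y.shape  # torch.Size([1, 3, 8, 8])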
class ATenUpsampleNearest1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_nearest1d(Tensor self, int[1] output_size, float? scales=None) -> (Tensor)
aten::upsample_nearest1d.vec(Tensor input, int[]? output_size, float[]? scale_factors) -> (Tensor)'''
pass
class ATenUpsampleTrilinear3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_trilinear3d(Tensor self, int[3] output_size, bool align_corners, float? scales_d=None, float? scales_h=None, float? scales_w=None) -> (Tensor)
aten::upsample_trilinear3d.vec(Tensor input, int[]? output_size, bool align_corners, float[]? scale_factors) -> (Tensor)'''
pass
class ATenUpsampleBicubic2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_bicubic2d(Tensor self, int[2] output_size, bool align_corners, float? scales_h=None, float? scales_w=None) -> (Tensor)
aten::upsample_bicubic2d.vec(Tensor input, int[]? output_size, bool align_corners, float[]? scale_factors) -> (Tensor)'''
pass
class ATenUpsampleBilinear2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_bilinear2d(Tensor self, int[2] output_size, bool align_corners, float? scales_h=None, float? scales_w=None) -> (Tensor)
aten::upsample_bilinear2d.vec(Tensor input, int[]? output_size, bool align_corners, float[]? scale_factors) -> (Tensor)'''
pass
class ATenUpsampleLinear1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_linear1d(Tensor self, int[1] output_size, bool align_corners, float? scales=None) -> (Tensor)
aten::upsample_linear1d.vec(Tensor input, int[]? output_size, bool align_corners, float[]? scale_factors) -> (Tensor)'''
pass
class ATenUpsampleNearest3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::upsample_nearest3d.vec(Tensor input, int[]? output_size, float[]? scale_factors) -> (Tensor)
aten::upsample_nearest3d(Tensor self, int[3] output_size, float? scales_d=None, float? scales_h=None, float? scales_w=None) -> (Tensor)'''
pass
class ATenReplicationPad3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::replication_pad3d(Tensor self, int[6] padding) -> (Tensor)'''
pass
class ATenReplicationPad2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::replication_pad2d(Tensor self, int[4] padding) -> (Tensor)'''
pass
class ATenReplicationPad1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::replication_pad1d(Tensor self, int[2] padding) -> (Tensor)'''
pass
class ATenReflectionPad2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::reflection_pad2d(Tensor self, int[4] padding) -> (Tensor)'''
pass
class ATenReflectionPad1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::reflection_pad1d(Tensor self, int[2] padding) -> (Tensor)'''
pass
class ATenMaxUnpool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_unpool3d(Tensor self, Tensor indices, int[3] output_size, int[3] stride, int[3] padding) -> (Tensor)'''
pass
class ATenMaxUnpool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_unpool2d(Tensor self, Tensor indices, int[2] output_size) -> (Tensor)'''
pass
class ATenFractionalMaxPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fractional_max_pool2d.output(Tensor self, int[2] kernel_size, int[2] output_size, Tensor random_samples, *, Tensor(a!) output, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))
aten::fractional_max_pool2d(Tensor self, int[2] kernel_size, int[2] output_size, Tensor random_samples) -> (Tensor, Tensor)'''
pass
class ATenAvgPool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::avg_pool3d(Tensor self, int[3] kernel_size, int[3] stride=[], int[3] padding=[0, 0, 0], bool ceil_mode=False, bool count_include_pad=True, int? divisor_override=None) -> (Tensor)'''
pass
class ATenConvDepthwise3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv_depthwise3d(Tensor self, Tensor weight, int[3] kernel_size, Tensor? bias, int[3] stride, int[3] padding, int[3] dilation) -> (Tensor)'''
pass
class ATenColIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::col_indices(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenAvgPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::avg_pool2d(Tensor self, int[2] kernel_size, int[2] stride=[], int[2] padding=[0, 0], bool ceil_mode=False, bool count_include_pad=True, int? divisor_override=None) -> (Tensor)'''
pass
class ATenEmptyQuantizedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::empty_quantized(int[] size, Tensor qtensor) -> (Tensor)'''
pass
class ATenQuantizedBatchNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_batch_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor mean, Tensor var, float eps, float output_scale, int output_zero_point) -> (Tensor)'''
pass
class ATenAdaptiveMaxPool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_max_pool3d.out(Tensor self, int[3] output_size, *, Tensor(a!) out, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))
aten::adaptive_max_pool3d(Tensor self, int[3] output_size) -> (Tensor, Tensor)'''
pass
class ATenAdaptiveMaxPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_max_pool2d.out(Tensor self, int[2] output_size, *, Tensor(a!) out, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))
aten::adaptive_max_pool2d(Tensor self, int[2] output_size) -> (Tensor, Tensor)'''
pass
class ATenAdaptiveAvgPool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::adaptive_avg_pool3d(Tensor self, int[3] output_size) -> (Tensor)
aten::_adaptive_avg_pool3d(Tensor self, int[3] output_size) -> (Tensor)'''
pass
class ATenSpecialI0eSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_i0e(Tensor self) -> (Tensor)'''
pass
class ATenAdaptiveAvgPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_adaptive_avg_pool2d(Tensor self, int[2] output_size) -> (Tensor)
aten::adaptive_avg_pool2d(Tensor self, int[2] output_size) -> (Tensor)'''
pass
class ATenSoftshrinkSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::softshrink(Tensor self, Scalar lambd=0.5) -> (Tensor)'''
pass
class ATenSpecialExp2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_exp2(Tensor self) -> (Tensor)'''
pass
class ATenRreluWithNoiseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rrelu_with_noise(Tensor self, Tensor noise, Scalar lower=0.125, Scalar upper=0.33333333333333331, bool training=False, Generator? generator=None) -> (Tensor)'''
pass
class ATenLeakyReluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)'''
pass
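# Hedged example (illustrative sketch, not part of the generated schemas): the
# default negative_slope in the aten::leaky_relu schema above matches
# torch.nn.functional.leaky_relu. This helper is hypothetical and unused.
def _example_leaky_relu():
    import torch
    import torch.nn.functional as F

    x = torch.tensor([-1.0, 0.0, 2.0])
    return F.leaky_relu(x, negative_slope=0.01)  # tensor([-0.0100, 0.0000, 2.0000])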
class ATenSpecialExpm1Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_expm1(Tensor self) -> (Tensor)'''
pass
class ATenHardswishSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hardswish(Tensor self) -> (Tensor)'''
pass
class ATenHardtanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hardtanh(Tensor self, Scalar min_val=-1, Scalar max_val=1) -> (Tensor)'''
pass
class ATenFractionalMaxPool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fractional_max_pool3d.output(Tensor self, int[3] kernel_size, int[3] output_size, Tensor random_samples, *, Tensor(a!) output, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))
aten::fractional_max_pool3d(Tensor self, int[3] kernel_size, int[3] output_size, Tensor random_samples) -> (Tensor, Tensor)'''
pass
class ATenHardsigmoidSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hardsigmoid(Tensor self) -> (Tensor)'''
pass
class ATenGluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::glu(Tensor self, int dim=-1) -> (Tensor)'''
pass
class ATenEluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::elu(Tensor self, Scalar alpha=1, Scalar scale=1, Scalar input_scale=1) -> (Tensor)'''
pass
class ATenSpecialExpitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_expit(Tensor self) -> (Tensor)'''
pass
class ATenSpecialLogitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_logit(Tensor self, float? eps=None) -> (Tensor)'''
pass
class ATenBucketizeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bucketize.Tensor(Tensor self, Tensor boundaries, *, bool out_int32=False, bool right=False) -> (Tensor)
aten::bucketize.Scalar(Scalar self, Tensor boundaries, *, bool out_int32=False, bool right=False) -> (Tensor)'''
pass
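# Hedged example (illustrative only): aten::bucketize returns, for each input
# element, the index of the boundary bucket it falls into; `right` controls which
# side ties land on. Hypothetical helper, unused by the converter.
def _example_bucketize():
    import torch

    boundaries = torch.tensor([1.0, 3.0, 5.0])
    values = torch.tensor([0.5, 3.0, 6.0])
    return torch.bucketize(values, boundaries)  # tensor([0, 1, 3])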
class ATenSpecialErfinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_erfinv(Tensor self) -> (Tensor)'''
pass
class ATenSpecialErfcSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_erfc(Tensor self) -> (Tensor)'''
pass
class ATenSpecialErfSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_erf(Tensor self) -> (Tensor)'''
pass
class ATenSpecialGammalnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::special_gammaln(Tensor self) -> (Tensor)'''
pass
class ATenMoveaxisSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::moveaxis.intlist(Tensor(a) self, int[] source, int[] destination) -> (Tensor(a))
aten::moveaxis.int(Tensor(a) self, int source, int destination) -> (Tensor(a))'''
pass
class ATenSwapdimsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::swapdims(Tensor(a) self, int dim0, int dim1) -> (Tensor(a))'''
pass
class ATenSwapaxesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::swapaxes(Tensor(a) self, int axis0, int axis1) -> (Tensor(a))'''
pass
class ATenRowStackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::row_stack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenVstackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::vstack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenNegativeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::negative(Tensor self) -> (Tensor)'''
pass
class ATenTruncSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::trunc(Tensor self) -> (Tensor)'''
pass
class ATenKeysSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::keys.Tensor(Dict(Tensor, t) self) -> (Tensor[](*))'''
pass
class ATenSubtractSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::subtract.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> (Tensor)
aten::subtract.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> (Tensor)'''
pass
class ATenSubSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sub.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> (Tensor)
aten::sub.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> (Tensor)'''
pass
class ATenTransposeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::transpose.int(Tensor(a) self, int dim0, int dim1) -> (Tensor(a))
aten::transpose.Dimname(Tensor(a) self, str dim0, str dim1) -> (Tensor(a))'''
pass
class ATenLinalgHouseholderProductSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_householder_product(Tensor input, Tensor tau) -> (Tensor)'''
pass
class ATenOrgqrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::orgqr(Tensor self, Tensor input2) -> (Tensor)'''
pass
class ATenRowwisePruneSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_rowwise_prune(Tensor weight, Tensor mask, int compressed_indices_dtype) -> (Tensor, Tensor)'''
pass
class ATenNewFullSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::new_full(Tensor self, int[] size, Scalar fill_value, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None) -> (Tensor)'''
pass
class ATenNotEqualSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::not_equal.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::not_equal.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMinimumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::minimum(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fmax(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFminSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fmin(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenHistcSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::histc(Tensor self, int bins=100, Scalar min=0, Scalar max=0) -> (Tensor)'''
pass
class ATenLuWithInfoSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_lu_with_info(Tensor self, bool pivot=True, bool check_errors=True) -> (Tensor, Tensor, Tensor)'''
pass
class ATenGeqrfSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::geqrf.a(Tensor self, *, Tensor(a!) a, Tensor(b!) tau) -> (Tensor(a!) a, Tensor(b!) tau)
aten::geqrf(Tensor self) -> (Tensor a, Tensor tau)'''
pass
class ATenCholeskyInverseSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cholesky_inverse(Tensor self, bool upper=False) -> (Tensor)'''
pass
class ATenSolveHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_solve_helper(Tensor self, Tensor A) -> (Tensor, Tensor)'''
pass
class ATenCholeskySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cholesky(Tensor self, bool upper=False) -> (Tensor)'''
pass
class ATenGatherSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gather(Tensor self, int dim, Tensor index, *, bool sparse_grad=False) -> (Tensor)
aten::gather.dimname(Tensor self, str dim, Tensor index, *, bool sparse_grad=False) -> (Tensor)'''
pass
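# Hedged example (illustrative only): aten::gather picks elements along `dim`
# using an index tensor of the same rank as the input. Hypothetical, unused helper.
def _example_gather():
    import torch

    x = torch.tensor([[1, 2, 3], [4, 5, 6]])
    index = torch.tensor([[0, 2], [1, 0]])
    # Row 0 takes columns 0 and 2; row 1 takes columns 1 and 0
    return torch.gather(x, 1, index)  # tensor([[1, 3], [5, 4]])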
class ATenDiagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diag(Tensor self, int diagonal=0) -> (Tensor)'''
pass
class ATenTriangularSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::triangular_solve.X(Tensor self, Tensor A, bool upper=True, bool transpose=False, bool unitriangular=False, *, Tensor(a!) X, Tensor(b!) M) -> (Tensor(a!) solution, Tensor(b!) cloned_coefficient)
aten::triangular_solve(Tensor self, Tensor A, bool upper=True, bool transpose=False, bool unitriangular=False) -> (Tensor solution, Tensor cloned_coefficient)'''
pass
class ATenFmodSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fmod.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::fmod.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMultiplySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::multiply.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::multiply.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenCholeskySolveHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_cholesky_solve_helper(Tensor self, Tensor A, bool upper) -> (Tensor)'''
pass
class ATenTriuSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::triu(Tensor self, int diagonal=0) -> (Tensor)'''
pass
class ATenMulSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mul.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::mul.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenSymeigHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_symeig_helper(Tensor self, bool eigenvectors, bool upper) -> (Tensor, Tensor)'''
pass
class ATenTrilSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tril(Tensor self, int diagonal=0) -> (Tensor)'''
pass
class ATenIm2colSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::im2col(Tensor self, int[2] kernel_size, int[2] dilation, int[2] padding, int[2] stride) -> (Tensor)'''
pass
class ATenLinalgSlogdetSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_slogdet.out(Tensor self, *, Tensor(a!) sign, Tensor(b!) logabsdet) -> (Tensor(a!) sign, Tensor(b!) logabsdet)
aten::linalg_slogdet(Tensor self) -> (Tensor sign, Tensor logabsdet)'''
pass
class ATenRshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__rshift__.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::__rshift__.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenCol2imSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::col2im(Tensor self, int[2] output_size, int[2] kernel_size, int[2] dilation, int[2] padding, int[2] stride) -> (Tensor)'''
pass
class ATenLinalgCholeskyExSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_cholesky_ex.L(Tensor self, *, bool check_errors=False, Tensor(a!) L, Tensor(b!) info) -> (Tensor(a!) L, Tensor(b!) info)
aten::linalg_cholesky_ex(Tensor self, *, bool check_errors=False) -> (Tensor L, Tensor info)'''
pass
class ATenLshiftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__lshift__.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::__lshift__.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::le.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::le.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenFakeQuantizeLearnablePerChannelAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fake_quantize_learnable_per_channel_affine(Tensor self, Tensor scale, Tensor zero_point, int axis, int quant_min, int quant_max, float grad_factor=1.) -> (Tensor)'''
pass
class ATenFakeQuantizePerChannelAffineCachemaskSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_channel_affine_cachemask(Tensor self, Tensor scale, Tensor zero_point, int axis, int quant_min, int quant_max) -> (Tensor output, Tensor mask)'''
pass
class ATenGreaterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::greater.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::greater.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFakeQuantizeLearnablePerTensorAffineSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fake_quantize_learnable_per_tensor_affine(Tensor self, Tensor scale, Tensor zero_point, int quant_min, int quant_max, float grad_factor=1.) -> (Tensor)'''
pass
class ATenGtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gt.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::gt.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenMakePerChannelQuantizedTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_make_per_channel_quantized_tensor(Tensor self, Tensor scale, Tensor zero_point, int axis) -> (Tensor)'''
pass
class ATenMakePerTensorQuantizedTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_make_per_tensor_quantized_tensor(Tensor self, float scale, int zero_point) -> (Tensor)'''
pass
class ATenGreaterEqualSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::greater_equal.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::greater_equal.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenDequantizeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dequantize.self(Tensor self) -> (Tensor)
aten::dequantize.tensors(Tensor[] tensors) -> (Tensor[])
aten::dequantize.tensor(Tensor qtensor) -> (Tensor)
aten::dequantize.list(Tensor[] qtensors) -> (Tensor[])'''
pass
class ATenGeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ge.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::ge.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenQuantizePerTensorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantize_per_tensor(Tensor self, float scale, int zero_point, int dtype) -> (Tensor)
aten::quantize_per_tensor.tensors(Tensor[] tensors, Tensor scales, Tensor zero_points, int dtype) -> (Tensor[])'''
pass
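# Hedged example (illustrative only): aten::quantize_per_tensor and the
# aten::dequantize overloads above form a round trip between float and quantized
# tensors. Hypothetical, unused helper.
def _example_quantize_per_tensor():
    import torch

    x = torch.tensor([0.0, 0.5, 1.0])
    q = torch.quantize_per_tensor(x, scale=0.1, zero_point=0, dtype=torch.quint8)
    return q.dequantize()  # approximately the original values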
class ATenLtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lt.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::lt.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenHeavisideSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::heaviside(Tensor self, Tensor values) -> (Tensor)'''
pass
class ATenFrexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::frexp.Tensor(Tensor self) -> (Tensor mantissa, Tensor exponent)'''
pass
class ATenBinomialSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::binomial(Tensor count, Tensor prob, Generator? generator=None) -> (Tensor)'''
pass
class ATenFftFftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenStandardGammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_standard_gamma(Tensor self, Generator? generator=None) -> (Tensor)'''
pass
class ATenFftIfftnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_ifftn(Tensor self, int[1]? s=None, int[1]? dim=None, str? norm=None) -> (Tensor)'''
pass
class ATenSWhereSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_s_where(Tensor condition, Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenUniqueDimConsecutiveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unique_dim_consecutive(Tensor self, int dim, bool return_inverse=False, bool return_counts=False) -> (Tensor, Tensor, Tensor)'''
pass
class ATenUnflattenDenseTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unflatten_dense_tensors(Tensor flat, Tensor[] tensors) -> (Tensor[])'''
pass
class ATenUniqueSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_unique(Tensor self, bool sorted=True, bool return_inverse=False) -> (Tensor, Tensor)'''
pass
class ATenFlipSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flip(Tensor self, int[] dims) -> (Tensor)'''
pass
class ATenNansumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nansum(Tensor self, *, int? dtype=None) -> (Tensor)
aten::nansum.dim_IntList(Tensor self, int[1] dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenUniqueConsecutiveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unique_consecutive(Tensor self, bool return_inverse=False, bool return_counts=False, int? dim=None) -> (Tensor, Tensor, Tensor)'''
pass
class ATenTrueDivideSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::true_divide.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::true_divide.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLogitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logit(Tensor self, float? eps=None) -> (Tensor)'''
pass
class ATenDivSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::div.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::div.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::div.Tensor_mode(Tensor self, Tensor other, *, str? rounding_mode) -> (Tensor)
aten::div.Scalar_mode(Tensor self, Scalar other, *, str? rounding_mode) -> (Tensor)'''
pass
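# Hedged example (illustrative only): the Tensor_mode/Scalar_mode overloads of
# aten::div above correspond to the `rounding_mode` keyword argument.
# Hypothetical, unused helper.
def _example_div_rounding():
    import torch

    a = torch.tensor([7.0, -7.0])
    b = torch.tensor([2.0, 2.0])
    trunc = torch.div(a, b, rounding_mode='trunc')  # tensor([ 3., -3.])
    floor = torch.div(a, b, rounding_mode='floor')  # tensor([ 3., -4.])
    return trunc, floor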
class ATenHardshrinkSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hardshrink(Tensor self, Scalar lambd=0.5) -> (Tensor)'''
pass
class ATenAminSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::amin(Tensor self, int[1] dim=[], bool keepdim=False) -> (Tensor)'''
pass
class ATenTakeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::take(Tensor self, Tensor index) -> (Tensor)'''
pass
class ATenAmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::amax(Tensor self, int[1] dim=[], bool keepdim=False) -> (Tensor)'''
pass
class ATenLinalgNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_norm(Tensor self, Scalar? ord=None, int[1]? dim=None, bool keepdim=False, *, int? dtype=None) -> (Tensor)
aten::linalg_norm.ord_str(Tensor self, str ord, int[1]? dim=None, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenLinalgMultiDotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_multi_dot(Tensor[] tensors) -> (Tensor)'''
pass
class ATenIndexSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index.Tensor(Tensor self, Tensor?[] indices) -> (Tensor)
aten::index.Tensor_hacked_twin(Tensor self, Tensor[] indices) -> (Tensor)'''
pass
class ATenDetSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::det(Tensor self) -> (Tensor)'''
pass
class ATenFftC2rSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fft_c2r(Tensor self, int[] dim, int normalization, int last_dim_size) -> (Tensor)'''
pass
class ATenFftR2cSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fft_r2c(Tensor self, int[] dim, int normalization, bool onesided) -> (Tensor)'''
pass
class ATenGridSampler3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::grid_sampler_3d(Tensor input, Tensor grid, int interpolation_mode, int padding_mode, bool align_corners) -> (Tensor)'''
pass
class ATenGridSampler2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::grid_sampler_2d(Tensor input, Tensor grid, int interpolation_mode, int padding_mode, bool align_corners) -> (Tensor)'''
pass
class ATenSspaddmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sspaddmm(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenClampSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::clamp(Tensor self, Scalar? min=None, Scalar? max=None) -> (Tensor)
aten::clamp.Tensor(Tensor self, Tensor? min=None, Tensor? max=None) -> (Tensor)'''
pass
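# Hedged example (illustrative only): aten::clamp has Scalar and Tensor bound
# overloads, and min/max may each be omitted. Hypothetical, unused helper.
def _example_clamp():
    import torch

    x = torch.tensor([-2.0, 0.5, 3.0])
    return torch.clamp(x, min=0.0, max=1.0)  # tensor([0.0000, 0.5000, 1.0000])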
class ATenGcdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gcd(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenExp2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::exp2(Tensor self) -> (Tensor)'''
pass
class ATenAtanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atan(Tensor self) -> (Tensor)'''
pass
class ATenCountNonzeroSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::count_nonzero.dim_IntList(Tensor self, int[] dim) -> (Tensor)
aten::count_nonzero(Tensor self, int? dim=None) -> (Tensor)'''
pass
class ATenPolarSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::polar(Tensor abs, Tensor angle) -> (Tensor)'''
pass
class ATenComplexSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::complex(Tensor real, Tensor imag) -> (Tensor)'''
pass
class ATenCopysignSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::copysign.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::copysign.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenBincountSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bincount(Tensor self, Tensor? weights=None, int minlength=0) -> (Tensor)'''
pass
class ATenUnique2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_unique2(Tensor self, bool sorted=True, bool return_inverse=False, bool return_counts=False) -> (Tensor, Tensor, Tensor)'''
pass
class ATenBatchNormBackwardElemtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_backward_elemt(Tensor grad_out, Tensor input, Tensor mean, Tensor invstd, Tensor? weight, Tensor mean_dy, Tensor mean_dy_xmu, Tensor count) -> (Tensor)'''
pass
class ATenArgminSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::argmin(Tensor self, int? dim=None, bool keepdim=False) -> (Tensor)'''
pass
class ATenBatchNormBackwardReduceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_backward_reduce(Tensor grad_out, Tensor input, Tensor mean, Tensor invstd, Tensor? weight, bool input_g, bool weight_g, bool bias_g) -> (Tensor, Tensor, Tensor, Tensor)'''
pass
class ATenArgmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::argmax(Tensor self, int? dim=None, bool keepdim=False) -> (Tensor)'''
pass
class ATenAsinhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::asinh(Tensor self) -> (Tensor)'''
pass
class ATenColumnStackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::column_stack(Tensor[] tensors) -> (Tensor)'''
pass
class ATenNllLossNdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nll_loss_nd(Tensor self, Tensor target, Tensor? weight=None, int reduction=1, int ignore_index=-100) -> (Tensor)'''
pass
class ATenFftFftSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fft_fft(Tensor self, int? n=None, int dim=-1, str? norm=None) -> (Tensor)'''
pass
class ATenFixSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fix(Tensor self) -> (Tensor)'''
pass
class ATenAsinSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::asin(Tensor self) -> (Tensor)'''
pass
class ATenUpsampleBilinearSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__upsample_bilinear(Tensor input, int? size=None, int? scale_factor=None) -> (Tensor)
aten::__upsample_bilinear.size_list(Tensor input, int[]? size=None, int? scale_factor=None) -> (Tensor)
aten::__upsample_bilinear.scale_list(Tensor input, int? size=None, int[]? scale_factor=None) -> (Tensor)
aten::__upsample_bilinear.size_list_scale_list(Tensor input, int[]? size=None, int[]? scale_factor=None) -> (Tensor)'''
pass
class ATenBatchNormGatherStatsWithCountsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_gather_stats_with_counts(Tensor input, Tensor mean, Tensor invstd, Tensor? running_mean, Tensor? running_var, float momentum, float eps, Tensor counts) -> (Tensor, Tensor)'''
pass
class ATenAcosSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::acos(Tensor self) -> (Tensor)'''
pass
class ATenSincSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sinc(Tensor self) -> (Tensor)'''
pass
class ATenSgnSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sgn(Tensor self) -> (Tensor)'''
pass
class ATenSiluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::silu(Tensor self) -> (Tensor)'''
pass
class ATenRemainderSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::remainder.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::remainder.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenOrmqrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ormqr(Tensor self, Tensor input2, Tensor input3, bool left=True, bool transpose=False) -> (Tensor)'''
pass
class ATenNonzeroSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nonzero(Tensor self) -> (Tensor)'''
pass
class ATenBitwiseXorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bitwise_xor.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::bitwise_xor.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenBitwiseOrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bitwise_or.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::bitwise_or.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenBitwiseAndSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bitwise_and.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::bitwise_and.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenNativeBatchNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::native_batch_norm(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps) -> (Tensor, Tensor, Tensor)
aten::native_batch_norm.out(Tensor input, Tensor? weight, Tensor? bias, Tensor? running_mean, Tensor? running_var, bool training, float momentum, float eps, *, Tensor(a!) out, Tensor(b!) save_mean, Tensor(c!) save_invstd) -> (Tensor(a!), Tensor(b!), Tensor(c!))'''
pass
class ATenNarrowCopySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::narrow_copy(Tensor self, int dim, int start, int length) -> (Tensor)'''
pass
class ATenNanToNumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nan_to_num(Tensor self, float? nan=None, float? posinf=None, float? neginf=None) -> (Tensor)'''
pass
class ATenDataSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::data(Tensor self) -> (Tensor)'''
pass
class ATenNegSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::neg(Tensor self) -> (Tensor)'''
pass
class ATenZerosLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::zeros_like(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenVarSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::var(Tensor self, bool unbiased=True) -> (Tensor)
aten::var.dim(Tensor self, int[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor)
aten::var.names_dim(Tensor self, str[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor)
aten::var.correction(Tensor self, int[1]? dim, *, int? correction, bool keepdim=False) -> (Tensor)
aten::var.correction_names(Tensor self, str[1] dim, *, int? correction, bool keepdim=False) -> (Tensor)'''
pass
class ATenGerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ger(Tensor self, Tensor vec2) -> (Tensor)'''
pass
class ATenUnsafeSplitWithSizesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unsafe_split_with_sizes(Tensor self, int[] split_sizes, int dim=0) -> (Tensor[])'''
pass
class ATenUnsafeSplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unsafe_split.Tensor(Tensor self, int split_size, int dim=0) -> (Tensor[])'''
pass
class ATenUnflattenSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unflatten.Dimname(Tensor(a) self, str dim, int[] sizes, str[] names) -> (Tensor(a))
aten::unflatten.int(Tensor(a) self, int dim, int[] sizes, str[]? names=None) -> (Tensor(a))'''
pass
class ATenVanderSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::vander(Tensor x, int? N=None, bool increasing=False) -> (Tensor)'''
pass
class ATenViewAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::view_as(Tensor(a) self, Tensor other) -> (Tensor(a))'''
pass
class ATenDivideSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::divide.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::divide.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::divide.Tensor_mode(Tensor self, Tensor other, *, str? rounding_mode) -> (Tensor)
aten::divide.Scalar_mode(Tensor self, Scalar other, *, str? rounding_mode) -> (Tensor)'''
pass
class ATenRollSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::roll(Tensor self, int[1] shifts, int[1] dims=[]) -> (Tensor)'''
pass
class ATenLinalgDetSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_det(Tensor self) -> (Tensor)'''
pass
class ATenFftC2cSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fft_c2c(Tensor self, int[] dim, int normalization, bool forward) -> (Tensor)'''
pass
class ATenChainMatmulSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::chain_matmul(Tensor[] matrices) -> (Tensor)'''
pass
class ATenArctanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arctanh(Tensor self) -> (Tensor)'''
pass
class ATenNativeGroupNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::native_group_norm(Tensor input, Tensor? weight, Tensor? bias, int N, int C, int HxW, int group, float eps) -> (Tensor, Tensor, Tensor)'''
pass
class ATenSquareSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::square(Tensor self) -> (Tensor)'''
pass
class ATenMinSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::min(Tensor self) -> (Tensor)
aten::min.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::min.dim_min(Tensor self, int dim, bool keepdim=False, *, Tensor(a!) min, Tensor(b!) min_indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::min.names_dim(Tensor self, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::min.names_dim_min(Tensor self, str dim, bool keepdim=False, *, Tensor(a!) min, Tensor(b!) min_indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::min.other(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenNanmedianSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nanmedian(Tensor self) -> (Tensor)
aten::nanmedian.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::nanmedian.dim_values(Tensor self, int dim, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::nanmedian.names_dim(Tensor self, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::nanmedian.names_dim_values(Tensor self, str dim, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenMeanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mean(Tensor self, *, int? dtype=None) -> (Tensor)
        aten::mean.dim(Tensor self, int[1] dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)
aten::mean.names_dim(Tensor self, str[1] dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenPowSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pow.Tensor_Tensor(Tensor self, Tensor exponent) -> (Tensor)
aten::pow.Tensor_Scalar(Tensor self, Scalar exponent) -> (Tensor)
aten::pow.Scalar(Scalar self, Tensor exponent) -> (Tensor)'''
pass
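# Hedged example (illustrative only): the three aten::pow overloads above cover
# tensor**tensor, tensor**scalar, and scalar**tensor. Hypothetical, unused helper.
def _example_pow():
    import torch

    x = torch.tensor([1.0, 2.0, 3.0])
    tensor_scalar = torch.pow(x, 2)    # aten::pow.Tensor_Scalar
    scalar_tensor = torch.pow(2.0, x)  # aten::pow.Scalar
    return tensor_scalar, scalar_tensor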
class ATenPolygammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::polygamma(int n, Tensor self) -> (Tensor)'''
pass
class ATenOnesLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ones_like(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenNextafterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nextafter(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenRenameSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rename(Tensor(a) self, str[]? names) -> (Tensor(a))'''
pass
class ATenRefineNamesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::refine_names(Tensor(a) self, str[] names) -> (Tensor(a))'''
pass
class ATenMedianSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::median(Tensor self) -> (Tensor)
aten::median.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::median.dim_values(Tensor self, int dim, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::median.names_dim(Tensor self, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::median.names_dim_values(Tensor self, str dim, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenMaxPool3dWithIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool3d_with_indices(Tensor self, int[3] kernel_size, int[3] stride=[], int[3] padding=[0, 0, 0], int[3] dilation=[1, 1, 1], bool ceil_mode=False) -> (Tensor, Tensor)
aten::max_pool3d_with_indices.out(Tensor self, int[3] kernel_size, int[3] stride=[], int[3] padding=[0, 0, 0], int[3] dilation=[1, 1, 1], bool ceil_mode=False, *, Tensor(a!) out, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))'''
pass
class ATenLogicalXorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logical_xor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMaxPool3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool3d(Tensor self, int[3] kernel_size, int[3] stride=[], int[3] padding=[0, 0, 0], int[3] dilation=[1, 1, 1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenLogicalOrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logical_or(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMaxPool2dWithIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool2d_with_indices(Tensor self, int[2] kernel_size, int[2] stride=[], int[2] padding=[0, 0], int[2] dilation=[1, 1], bool ceil_mode=False) -> (Tensor, Tensor)
aten::max_pool2d_with_indices.out(Tensor self, int[2] kernel_size, int[2] stride=[], int[2] padding=[0, 0], int[2] dilation=[1, 1], bool ceil_mode=False, *, Tensor(a!) out, Tensor(b!) indices) -> (Tensor(a!), Tensor(b!))'''
pass
class ATenLogicalNotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logical_not(Tensor self) -> (Tensor)'''
pass
class ATenMaxPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool2d(Tensor self, int[2] kernel_size, int[2] stride=[], int[2] padding=[0, 0], int[2] dilation=[1, 1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenMaxPool1dWithIndicesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool1d_with_indices(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], int[1] dilation=[1], bool ceil_mode=False) -> (Tensor, Tensor)'''
pass
class ATenLogicalAndSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logical_and(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMaxPool1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max_pool1d(Tensor self, int[1] kernel_size, int[1] stride=[], int[1] padding=[0], int[1] dilation=[1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenLogaddexp2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logaddexp2(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::max(Tensor self) -> (Tensor)
aten::max.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::max.dim_max(Tensor self, int dim, bool keepdim=False, *, Tensor(a!) max, Tensor(b!) max_values) -> (Tensor(a!) values, Tensor(b!) indices)
aten::max.names_dim(Tensor self, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::max.names_dim_max(Tensor self, str dim, bool keepdim=False, *, Tensor(a!) max, Tensor(b!) max_values) -> (Tensor(a!) values, Tensor(b!) indices)
aten::max.other(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenLogaddexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logaddexp(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMatrixExpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::matrix_exp(Tensor self) -> (Tensor)'''
pass
class ATenMatmulSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::matmul(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenMaskedSelectSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::masked_select(Tensor self, Tensor mask) -> (Tensor)'''
pass
class ATenMarginRankingLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::margin_ranking_loss(Tensor input1, Tensor input2, Tensor target, float margin=0., int reduction=1) -> (Tensor)'''
pass
class ATenPoissonSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::poisson(Tensor self, Generator? generator=None) -> (Tensor)'''
pass
class ATenIndexFillSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_fill.Dimname_Scalar(Tensor self, str dim, Tensor index, Scalar value) -> (Tensor)
aten::index_fill.Dimname_Tensor(Tensor self, str dim, Tensor index, Tensor value) -> (Tensor)
aten::index_fill.int_Scalar(Tensor self, int dim, Tensor index, Scalar value) -> (Tensor)
aten::index_fill.int_Tensor(Tensor self, int dim, Tensor index, Tensor value) -> (Tensor)'''
pass
class ATenIgammacSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::igammac(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenIgammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::igamma(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenI0Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::i0(Tensor self) -> (Tensor)'''
pass
class ATenMaskedFillSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::masked_fill.Scalar(Tensor self, Tensor mask, Scalar value) -> (Tensor)
aten::masked_fill.Tensor(Tensor self, Tensor mask, Tensor value) -> (Tensor)'''
pass
class ATenLstsqSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lstsq.X(Tensor self, Tensor A, *, Tensor(a!) X, Tensor(b!) qr) -> (Tensor(a!) solution, Tensor(b!) QR)
aten::lstsq(Tensor self, Tensor A) -> (Tensor solution, Tensor QR)'''
pass
class ATenHypotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hypot(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenFullLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::full_like(Tensor self, Scalar fill_value, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenFloorDivideSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::floor_divide(Tensor self, Tensor other) -> (Tensor)
aten::floor_divide.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenFlattenSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::flatten.DimnameList(Tensor(a) self, str[] dims, str out_dim) -> (Tensor(a))
aten::flatten.named_out_dim(Tensor(a) self, int start_dim, int end_dim, str out_dim) -> (Tensor(a))
aten::flatten.using_ints(Tensor(a) self, int start_dim=0, int end_dim=-1) -> (Tensor(a))
aten::flatten.using_names(Tensor(a) self, str start_dim, str end_dim, str out_dim) -> (Tensor(a))'''
pass
class ATenLogsumexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logsumexp(Tensor self, int[1] dim, bool keepdim=False) -> (Tensor)
aten::logsumexp.names(Tensor self, str[1] dim, bool keepdim=False) -> (Tensor)'''
pass
class ATenLogcumsumexpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::logcumsumexp(Tensor self, int dim) -> (Tensor)
aten::logcumsumexp.dimname(Tensor self, str dim) -> (Tensor)
aten::_logcumsumexp(Tensor self, int dim) -> (Tensor)'''
pass
class ATenLogSoftmaxBackwardDataSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_log_softmax_backward_data(Tensor grad_output, Tensor output, int dim, Tensor self) -> (Tensor)'''
pass
class ATenLessEqualSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::less_equal.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::less_equal.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenThresholdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::threshold(Tensor self, Scalar threshold, Scalar value) -> (Tensor)'''
pass
class ATenEmptyLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::empty_like(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenProdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::prod(Tensor self, *, int? dtype=None) -> (Tensor)
aten::prod.dim_int(Tensor self, int dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)
aten::prod.dim_Dimname(Tensor self, str dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dropout(Tensor input, float p, bool train) -> (Tensor)'''
pass
class ATenDetachSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::detach(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenChannelShuffleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::channel_shuffle(Tensor self, int groups) -> (Tensor)'''
pass
class ATenTensorSplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tensor_split.sections(Tensor(a) self, int sections, int dim=0) -> (Tensor[])
aten::tensor_split.indices(Tensor(a) self, int[] indices, int dim=0) -> (Tensor[])
aten::tensor_split.tensor_indices_or_sections(Tensor(a) self, Tensor tensor_indices_or_sections, int dim=0) -> (Tensor[])'''
pass
class ATenDeg2radSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::deg2rad(Tensor self) -> (Tensor)'''
pass
class ATenCumminSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cummin(Tensor self, int dim) -> (Tensor values, Tensor indices)
aten::cummin.dimname(Tensor self, str dim) -> (Tensor values, Tensor indices)
aten::cummin.out(Tensor self, int dim, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenLog2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log2(Tensor self) -> (Tensor)'''
pass
class ATenLog1pSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log1p(Tensor self) -> (Tensor)'''
pass
class ATenLog10Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log10(Tensor self) -> (Tensor)'''
pass
class ATenBitwiseNotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bitwise_not(Tensor self) -> (Tensor)'''
pass
class ATenVarMeanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::var_mean(Tensor self, bool unbiased=True) -> (Tensor, Tensor)
aten::var_mean.dim(Tensor self, int[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor, Tensor)
aten::var_mean.names_dim(Tensor self, str[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor, Tensor)
aten::var_mean.correction(Tensor self, int[1]? dim, *, int? correction, bool keepdim=False) -> (Tensor, Tensor)
aten::var_mean.correction_names(Tensor self, str[1] dim, *, int? correction, bool keepdim=False) -> (Tensor, Tensor)'''
pass
class ATenArctanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arctan(Tensor self) -> (Tensor)'''
pass
class ATenVdotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::vdot(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenStdMeanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::std_mean(Tensor self, bool unbiased=True) -> (Tensor, Tensor)
aten::std_mean.dim(Tensor self, int[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor, Tensor)
aten::std_mean.names_dim(Tensor self, str[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor, Tensor)
aten::std_mean.correction(Tensor self, int[1]? dim, *, int? correction, bool keepdim=False) -> (Tensor, Tensor)
aten::std_mean.correction_names(Tensor self, str[1] dim, *, int? correction, bool keepdim=False) -> (Tensor, Tensor)'''
pass
class ATenAtanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atanh(Tensor self) -> (Tensor)'''
pass
class ATenBatchNormGatherStatsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_gather_stats(Tensor input, Tensor mean, Tensor invstd, Tensor? running_mean, Tensor? running_var, float momentum, float eps, int count) -> (Tensor, Tensor)'''
pass
class ATenAnySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::any(Tensor self) -> (Tensor)
aten::any.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor)
aten::any.dimname(Tensor self, str dim, bool keepdim=False) -> (Tensor)'''
pass
class ATenAlignAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::align_as(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenAliasSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::alias(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenUniqueDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unique_dim(Tensor self, int dim, bool sorted=True, bool return_inverse=False, bool return_counts=False) -> (Tensor, Tensor, Tensor)'''
pass
class ATenLcmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lcm(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenAddReluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_add_relu.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> (Tensor)'''
pass
class ATenLayerNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::layer_norm(Tensor input, int[] normalized_shape, Tensor? weight=None, Tensor? bias=None, float eps=1.0000000000000001e-05, bool cudnn_enable=True) -> (Tensor)'''
pass
class ATenTrapzSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::trapz.x(Tensor y, Tensor x, *, int dim=-1) -> (Tensor)
aten::trapz.dx(Tensor y, *, float dx=1., int dim=-1) -> (Tensor)'''
pass
class ATenFusedDropoutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fused_dropout(Tensor self, float p, Generator? generator=None) -> (Tensor, Tensor)'''
pass
class ATenSortedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sorted.Tensor(Tensor[](a) input) -> (Tensor[])'''
pass
class ATenBinaryCrossEntropySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::binary_cross_entropy(Tensor self, Tensor target, Tensor? weight=None, int reduction=1) -> (Tensor)'''
pass
class ATenScatterAddSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::scatter_add(Tensor self, int dim, Tensor index, Tensor src) -> (Tensor)
aten::scatter_add.dimname(Tensor self, str dim, Tensor index, Tensor src) -> (Tensor)'''
pass
class ATenTensordotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tensordot(Tensor self, Tensor other, int[] dims_self, int[] dims_other) -> (Tensor)'''
pass
class ATenTensorToListSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_tensor_to_list(Tensor self) -> (int[])'''
pass
class ATenCrossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cross(Tensor self, Tensor other, int? dim=None) -> (Tensor)'''
pass
class ATenBilinearSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bilinear(Tensor input1, Tensor input2, Tensor weight, Tensor? bias) -> (Tensor)'''
pass
class ATenCumprodSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cumprod(Tensor self, int dim, *, int? dtype=None) -> (Tensor)
aten::cumprod.dimname(Tensor self, str dim, *, int? dtype=None) -> (Tensor)
aten::_cumprod(Tensor self, int dim) -> (Tensor)'''
pass
class ATenLogSoftmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log_softmax.int(Tensor self, int dim, int? dtype=None) -> (Tensor)
aten::log_softmax.Dimname(Tensor self, str dim, *, int? dtype=None) -> (Tensor)
aten::_log_softmax(Tensor self, int dim, bool half_to_float) -> (Tensor)'''
pass
class ATenAcoshSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::acosh(Tensor self) -> (Tensor)'''
pass
class ATenSoftmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::softmax.int(Tensor self, int dim, int? dtype=None) -> (Tensor)
aten::softmax.Dimname(Tensor self, str dim, *, int? dtype=None) -> (Tensor)
aten::_softmax(Tensor self, int dim, bool half_to_float) -> (Tensor)'''
pass
class ATenAtan2Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::atan2(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenRenormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::renorm(Tensor self, Scalar p, int dim, Scalar maxnorm) -> (Tensor)'''
pass
class ATenCdistSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cdist(Tensor x1, Tensor x2, float p=2., int? compute_mode=None) -> (Tensor)'''
pass
class ATenPdistSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::pdist(Tensor self, float p=2.) -> (Tensor)'''
pass
class ATenDistSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dist(Tensor self, Tensor other, Scalar p=2) -> (Tensor)'''
pass
class ATenMultiMarginLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::multi_margin_loss(Tensor self, Tensor target, Scalar p=1, Scalar margin=1, Tensor? weight=None, int reduction=1) -> (Tensor)'''
pass
class ATenConv2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv2d(Tensor input, Tensor weight, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] dilation=[1, 1], int groups=1) -> (Tensor)
aten::conv2d.padding(Tensor input, Tensor weight, Tensor? bias=None, int[2] stride=[1, 1], str padding="valid", int[2] dilation=[1, 1], int groups=1) -> (Tensor)'''
pass
class ATenSoftMarginLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::soft_margin_loss(Tensor self, Tensor target, int reduction=1) -> (Tensor)'''
pass
class ATenMultilabelMarginLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::multilabel_margin_loss(Tensor self, Tensor target, int reduction=1) -> (Tensor)'''
pass
class ATenKthvalueSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::kthvalue(Tensor self, int k, int dim=-1, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::kthvalue.dimname(Tensor self, int k, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::kthvalue.values(Tensor self, int k, int dim=-1, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenHuberLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::huber_loss(Tensor self, Tensor target, int reduction=1, float delta=1.) -> (Tensor)'''
pass
class ATenNllLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nll_loss(Tensor self, Tensor target, Tensor? weight=None, int reduction=1, int ignore_index=-100) -> (Tensor)'''
pass
class ATenPoissonNllLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::poisson_nll_loss(Tensor input, Tensor target, bool log_input, bool full, float eps, int reduction) -> (Tensor)'''
pass
class ATenSortSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sort.values(Tensor self, int dim=-1, bool descending=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::sort(Tensor self, int dim=-1, bool descending=False) -> (Tensor values, Tensor indices)
aten::sort.values_stable(Tensor self, *, bool? stable, int dim=-1, bool descending=False, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::sort.stable(Tensor self, *, bool? stable, int dim=-1, bool descending=False) -> (Tensor values, Tensor indices)
aten::sort.dimname_values(Tensor self, str dim, bool descending=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::sort.dimname(Tensor self, str dim, bool descending=False) -> (Tensor values, Tensor indices)
aten::sort.dimname_values_stable(Tensor self, *, bool? stable, str dim, bool descending=False, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::sort.dimname_stable(Tensor self, *, bool? stable, str dim, bool descending=False) -> (Tensor values, Tensor indices)'''
pass
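# Multi-line docstrings such as the one above list every registered overload
# of the operator (aten::sort, for instance, has values/stable/dimname
# variants). A concrete parse() implementation would typically inspect the
# incoming node's arguments to decide which overload it is handling; that
# dispatch logic is deliberately absent from these abstract stubs.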
class ATenArcsinhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arcsinh(Tensor self) -> (Tensor)'''
pass
class ATenCosineSimilaritySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cosine_similarity(Tensor x1, Tensor x2, int dim=1, float eps=1e-08) -> (Tensor)'''
pass
class ATenGroupNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::group_norm(Tensor input, int num_groups, Tensor? weight=None, Tensor? bias=None, float eps=1.0000000000000001e-05, bool cudnn_enabled=True) -> (Tensor)'''
pass
class ATenGeluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gelu(Tensor self) -> (Tensor)'''
pass
class ATenCosineEmbeddingLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cosine_embedding_loss(Tensor input1, Tensor input2, Tensor target, float margin=0., int reduction=1) -> (Tensor)'''
pass
class ATenArcsinSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arcsin(Tensor self) -> (Tensor)'''
pass
class ATenSoftplusSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::softplus(Tensor self, Scalar beta=1, Scalar threshold=20) -> (Tensor)'''
pass
class ATenIndexSelectSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_select(Tensor self, int dim, Tensor index) -> (Tensor)
aten::index_select.dimname(Tensor self, str dim, Tensor index) -> (Tensor)'''
pass
class ATenErfinvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::erfinv(Tensor self) -> (Tensor)'''
pass
class ATenLinalgTensorsolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_tensorsolve(Tensor self, Tensor other, int[]? dims=None) -> (Tensor)'''
pass
class ATenThnnFusedGruCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_thnn_fused_gru_cell(Tensor input_gates, Tensor hidden_gates, Tensor hx, Tensor? input_bias=None, Tensor? hidden_bias=None) -> (Tensor, Tensor)'''
pass
class ATenNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::norm.Scalar(Tensor self, Scalar p=2) -> (Tensor)
aten::norm.ScalarOpt_dim(Tensor self, Scalar? p, int[1] dim, bool keepdim=False) -> (Tensor)
aten::norm.names_ScalarOpt_dim(Tensor self, Scalar? p, str[1] dim, bool keepdim=False) -> (Tensor)
aten::norm.ScalarOpt_dtype(Tensor self, Scalar? p, *, int dtype) -> (Tensor)
aten::norm.ScalarOpt_dim_dtype(Tensor self, Scalar? p, int[1] dim, bool keepdim, *, int dtype) -> (Tensor)
aten::norm.names_ScalarOpt_dim_dtype(Tensor self, Scalar? p, str[1] dim, bool keepdim, *, int dtype) -> (Tensor)'''
pass
class ATenThnnFusedLstmCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_thnn_fused_lstm_cell(Tensor input_gates, Tensor hidden_gates, Tensor cx, Tensor? input_bias=None, Tensor? hidden_bias=None) -> (Tensor, Tensor, Tensor)'''
pass
class ATenRandLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rand_like(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenAddbmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addbmm(Tensor self, Tensor batch1, Tensor batch2, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenAlignToSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::align_to(Tensor(a) self, str[] names) -> (Tensor(a))
aten::align_to.ellipsis_idx(Tensor(a) self, str[] order, int ellipsis_idx) -> (Tensor(a))'''
pass
class ATenLinearSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linear(Tensor input, Tensor weight, Tensor? bias=None) -> (Tensor)'''
pass
class ATenSqrtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sqrt(Tensor self) -> (Tensor)'''
pass
class ATenConvolutionSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::convolution(Tensor input, Tensor weight, Tensor? bias, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding, int groups) -> (Tensor)
aten::_convolution.deprecated(Tensor input, Tensor weight, Tensor? bias, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding, int groups, bool benchmark, bool deterministic, bool cudnn_enabled) -> (Tensor)
aten::_convolution(Tensor input, Tensor weight, Tensor? bias, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding, int groups, bool benchmark, bool deterministic, bool cudnn_enabled, bool allow_tf32) -> (Tensor)'''
pass
class ATenConvTranspose3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv_transpose3d.input(Tensor input, Tensor weight, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0], int[3] output_padding=[0, 0, 0], int groups=1, int[3] dilation=[1, 1, 1]) -> (Tensor)'''
pass
class ATenXlogySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::xlogy.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::xlogy.Scalar_Self(Scalar self, Tensor other) -> (Tensor)
aten::xlogy.Scalar_Other(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenLstmCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lstm_cell(Tensor input, Tensor[] hx, Tensor w_ih, Tensor w_hh, Tensor? b_ih=None, Tensor? b_hh=None) -> (Tensor, Tensor)'''
pass
class ATenConvTranspose1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv_transpose1d(Tensor input, Tensor weight, Tensor? bias=None, int[1] stride=[1], int[1] padding=[0], int[1] output_padding=[0], int groups=1, int[1] dilation=[1]) -> (Tensor)'''
pass
class ATenSoftmaxBackwardDataSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_softmax_backward_data(Tensor grad_output, Tensor output, int dim, Tensor self) -> (Tensor)'''
pass
class ATenArccoshSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arccosh(Tensor self) -> (Tensor)'''
pass
class ATenEmptyPerChannelAffineQuantizedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_empty_per_channel_affine_quantized(int[] size, *, Tensor scales, Tensor zero_points, int axis, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=0) -> (Tensor)'''
pass
class ATenConvTbcSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv_tbc(Tensor self, Tensor weight, Tensor bias, int pad=0) -> (Tensor)'''
pass
class ATenConv3dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv3d(Tensor input, Tensor weight, Tensor? bias=None, int[3] stride=[1, 1, 1], int[3] padding=[0, 0, 0], int[3] dilation=[1, 1, 1], int groups=1) -> (Tensor)
aten::conv3d.padding(Tensor input, Tensor weight, Tensor? bias=None, int[3] stride=[1, 1, 1], str padding="valid", int[3] dilation=[1, 1, 1], int groups=1) -> (Tensor)'''
pass
class ATenSmoothL1LossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::smooth_l1_loss(Tensor self, Tensor target, int reduction=1, float beta=1.) -> (Tensor)'''
pass
class ATenConv1dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv1d(Tensor input, Tensor weight, Tensor? bias=None, int[1] stride=[1], int[1] padding=[0], int[1] dilation=[1], int groups=1) -> (Tensor)
aten::conv1d.padding(Tensor input, Tensor weight, Tensor? bias=None, int[1] stride=[1], str padding="valid", int[1] dilation=[1], int groups=1) -> (Tensor)'''
pass
class ATenL1LossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::l1_loss(Tensor self, Tensor target, int reduction=1) -> (Tensor)'''
pass
class ATenNativeLayerNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::native_layer_norm(Tensor input, int[] normalized_shape, Tensor? weight, Tensor? bias, float eps) -> (Tensor, Tensor, Tensor)'''
pass
class ATenKlDivSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::kl_div(Tensor self, Tensor target, int reduction=1, *, bool log_target=False) -> (Tensor)'''
pass
class ATenAddrSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addr(Tensor self, Tensor vec1, Tensor vec2, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenQPerChannelZeroPointsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::q_per_channel_zero_points(Tensor self) -> (Tensor)'''
pass
class ATenAddcmulSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addcmul(Tensor self, Tensor tensor1, Tensor tensor2, *, Scalar value=1) -> (Tensor)'''
pass
class ATenRandintLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::randint_like(Tensor self, int high, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)
aten::randint_like.low_dtype(Tensor self, int low, int high, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenAddmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addmm(Tensor self, Tensor mat1, Tensor mat2, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenNormalSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::normal.Tensor_float(Tensor mean, float std=1., *, Generator? generator=None) -> (Tensor)
aten::normal.float_Tensor(float mean, Tensor std, *, Generator? generator=None) -> (Tensor)
aten::normal.Tensor_Tensor(Tensor mean, Tensor std, *, Generator? generator=None) -> (Tensor)'''
pass
class ATenRnnTanhCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_tanh_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor? b_ih=None, Tensor? b_hh=None) -> (Tensor)'''
pass
class ATenBinaryCrossEntropyWithLogitsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::binary_cross_entropy_with_logits(Tensor self, Tensor target, Tensor? weight=None, Tensor? pos_weight=None, int reduction=1) -> (Tensor)'''
pass
class ATenRnnReluCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rnn_relu_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor? b_ih=None, Tensor? b_hh=None) -> (Tensor)'''
pass
class ATenMseLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mse_loss(Tensor self, Tensor target, int reduction=1) -> (Tensor)'''
pass
class ATenQuantizePerChannelSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantize_per_channel(Tensor self, Tensor scales, Tensor zero_points, int axis, int dtype) -> (Tensor)'''
pass
class ATenInterpolateSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__interpolate.scale_list(Tensor input, int? size=None, float[]? scale_factor=None, str mode="nearest", bool? align_corners=None, bool? recompute_scale_factor=None) -> (Tensor)
aten::__interpolate.size_list_scale_list(Tensor input, int[]? size=None, float[]? scale_factor=None, str mode="nearest", bool? align_corners=None, bool? recompute_scale_factor=None) -> (Tensor)
aten::__interpolate(Tensor input, int? size=None, float? scale_factor=None, str mode="nearest", bool? align_corners=None, bool? recompute_scale_factor=None) -> (Tensor)
aten::__interpolate.size_list(Tensor input, int[]? size=None, float? scale_factor=None, str mode="nearest", bool? align_corners=None, bool? recompute_scale_factor=None) -> (Tensor)'''
pass
class ATenExpandSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::expand(Tensor(a) self, int[] size, *, bool implicit=False) -> (Tensor(a))'''
pass
class ATenSvdHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_svd_helper(Tensor self, bool some, bool compute_uv) -> (Tensor U, Tensor S, Tensor V)'''
pass
class ATenTraceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::trace(Tensor self) -> (Tensor)'''
pass
class ATenTripletMarginLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::triplet_margin_loss(Tensor anchor, Tensor positive, Tensor negative, float margin=1., float p=2., float eps=9.9999999999999995e-07, bool swap=False, int reduction=1) -> (Tensor)'''
pass
class ATenNeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ne.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::ne.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenEqSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::eq.Tensor(Tensor self, Tensor other) -> (Tensor)
aten::eq.Scalar(Tensor self, Scalar other) -> (Tensor)'''
pass
class ATenNewZerosSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::new_zeros(Tensor self, int[] size, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None) -> (Tensor)'''
pass
class ATenNewEmptyStridedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::new_empty_strided(Tensor self, int[] size, int[] stride, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None) -> (Tensor)'''
pass
class ATenNewEmptySchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::new_empty(Tensor self, int[] size, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None) -> (Tensor)'''
pass
class ATenStackSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::stack(Tensor[] tensors, int dim=0) -> (Tensor)
aten::_stack(Tensor[] tensors, int dim=0) -> (Tensor)'''
pass
class ATenConvTranspose2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conv_transpose2d.input(Tensor input, Tensor weight, Tensor? bias=None, int[2] stride=[1, 1], int[2] padding=[0, 0], int[2] output_padding=[0, 0], int groups=1, int[2] dilation=[1, 1]) -> (Tensor)'''
pass
class ATenCatSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cat(Tensor[] tensors, int dim=0) -> (Tensor)
aten::cat.names(Tensor[] tensors, str dim) -> (Tensor)
aten::_cat(Tensor[] tensors, int dim=0) -> (Tensor)'''
pass
class ATenMmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mm(Tensor self, Tensor mat2) -> (Tensor)'''
pass
class ATenBmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bmm(Tensor self, Tensor mat2) -> (Tensor)
aten::_bmm(Tensor self, Tensor mat2, *, bool deterministic=False) -> (Tensor)'''
pass
class ATenSampleDirichletSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_sample_dirichlet(Tensor self, Generator? generator=None) -> (Tensor)'''
pass
class ATenDotSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::dot(Tensor self, Tensor tensor) -> (Tensor)'''
pass
class ATenViewAsComplexSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::view_as_complex(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenRelu6Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::relu6(Tensor self) -> (Tensor)'''
pass
class ATenPreluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::prelu(Tensor self, Tensor weight) -> (Tensor)'''
pass
class ATenViewAsRealSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::view_as_real(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenPositiveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::positive(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenImagSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::imag(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenMultinomialSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::multinomial(Tensor self, int num_samples, bool replacement=False, *, Generator? generator=None) -> (Tensor)'''
pass
class ATenExpm1Schema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::expm1(Tensor self) -> (Tensor)'''
pass
class ATenToSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::to.device(Tensor self, Device device, int dtype, bool non_blocking=False, bool copy=False, int? memory_format=None) -> (Tensor)
aten::to.dtype(Tensor self, int dtype, bool non_blocking=False, bool copy=False, int? memory_format=None) -> (Tensor)
aten::to.other(Tensor self, Tensor other, bool non_blocking=False, bool copy=False, int? memory_format=None) -> (Tensor)
aten::to.dtype_layout(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, bool non_blocking=False, bool copy=False, int? memory_format=None) -> (Tensor)
aten::to.prim_Device(Tensor(a) self, Device? device, int? dtype=None, bool non_blocking=False, bool copy=False) -> (Tensor(a|b))
aten::to.prim_dtype(Tensor(a) self, int? dtype=None, bool non_blocking=False, bool copy=False) -> (Tensor(a|b))
aten::to.prim_other(Tensor(a) self, bool non_blocking=False, bool copy=False) -> (Tensor(a|b))'''
pass
class ATenTopkSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::topk.values(Tensor self, int k, int dim=-1, bool largest=True, bool sorted=True, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)
aten::topk(Tensor self, int k, int dim=-1, bool largest=True, bool sorted=True) -> (Tensor values, Tensor indices)'''
pass
class ATenLessSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::less.Scalar(Tensor self, Scalar other) -> (Tensor)
aten::less.Tensor(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenNuclearNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nuclear_norm(Tensor self, bool keepdim=False) -> (Tensor)
aten::nuclear_norm.dim(Tensor self, int[2] dim, bool keepdim=False) -> (Tensor)'''
pass
class ATenGridSamplerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::grid_sampler(Tensor input, Tensor grid, int interpolation_mode, int padding_mode, bool align_corners) -> (Tensor)'''
pass
class ATenViewSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::view(Tensor(a) self, int[] size) -> (Tensor(a))
aten::view.dtype(Tensor(a) self, int dtype) -> (Tensor(a))'''
pass
class ATenMvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mv(Tensor self, Tensor vec) -> (Tensor)'''
pass
class ATenExpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::exp(Tensor self) -> (Tensor)'''
pass
class ATenRoundSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::round(Tensor self) -> (Tensor)'''
pass
class ATenClampMaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::clamp_max(Tensor self, Scalar max) -> (Tensor)
aten::clamp_max.Tensor(Tensor self, Tensor max) -> (Tensor)'''
pass
class ATenAngleSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::angle(Tensor self) -> (Tensor)'''
pass
class ATenClampMinSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::clamp_min(Tensor self, Scalar min) -> (Tensor)
aten::clamp_min.Tensor(Tensor self, Tensor min) -> (Tensor)'''
pass
class ATenSignSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sign(Tensor self) -> (Tensor)'''
pass
class ATenFracSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::frac(Tensor self) -> (Tensor)'''
pass
class ATenLogSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::log(Tensor self) -> (Tensor)'''
pass
class ATenSinSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sin(Tensor self) -> (Tensor)'''
pass
class ATenCloneSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::clone(Tensor self, *, int? memory_format=None) -> (Tensor)'''
pass
class ATenSignbitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::signbit(Tensor self) -> (Tensor)'''
pass
class ATenChunkSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::chunk(Tensor(a) self, int chunks, int dim=0) -> (Tensor[])'''
pass
class ATenLerpSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lerp.Scalar(Tensor self, Tensor end, Scalar weight) -> (Tensor)
aten::lerp.Tensor(Tensor self, Tensor end, Tensor weight) -> (Tensor)'''
pass
class ATenAlignTensorsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::align_tensors(Tensor[] tensors) -> (Tensor[])'''
pass
class ATenOuterSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::outer(Tensor self, Tensor vec2) -> (Tensor)'''
pass
class ATenUnsqueezeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unsqueeze(Tensor(a) self, int dim) -> (Tensor(a))'''
pass
class ATenFloorSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::floor(Tensor self) -> (Tensor)'''
pass
class ATenRepeatInterleaveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::repeat_interleave.Tensor(Tensor repeats) -> (Tensor)
aten::repeat_interleave.self_Tensor(Tensor self, Tensor repeats, int? dim=None) -> (Tensor)
aten::repeat_interleave.self_int(Tensor self, int repeats, int? dim=None) -> (Tensor)'''
pass
class ATenCumsumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cumsum(Tensor self, int dim, *, int? dtype=None) -> (Tensor)
aten::cumsum.dimname(Tensor self, str dim, *, int? dtype=None) -> (Tensor)
aten::_cumsum(Tensor self, int dim) -> (Tensor)'''
pass
class ATenBatchNormUpdateStatsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::batch_norm_update_stats(Tensor input, Tensor? running_mean, Tensor? running_var, float momentum) -> (Tensor, Tensor)'''
pass
class ATenTanSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tan(Tensor self) -> (Tensor)'''
pass
class ATenAllSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::all(Tensor self) -> (Tensor)
aten::all.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor)
aten::all.dimname(Tensor self, str dim, bool keepdim=False) -> (Tensor)'''
pass
class ATenReciprocalSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::reciprocal(Tensor self) -> (Tensor)'''
pass
class ATenNcfViewSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_ncf_view(Tensor(a) self, int[] input_shape, int normalized_ndim) -> (Tensor(a))'''
pass
class ATenCosSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cos(Tensor self) -> (Tensor)'''
pass
class ATenRsqrtSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rsqrt(Tensor self) -> (Tensor)'''
pass
class ATenCeilSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::ceil(Tensor self) -> (Tensor)'''
pass
class ATenLinalgSolveSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_solve(Tensor input, Tensor other) -> (Tensor)'''
pass
class ATenSliceSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::slice.Tensor(Tensor(a) self, int dim=0, int? start=None, int? end=None, int step=1) -> (Tensor(a))'''
pass
class ATenAbsoluteSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::absolute(Tensor self) -> (Tensor)'''
pass
class ATenSinhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sinh(Tensor self) -> (Tensor)'''
pass
class ATenConjSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::conj(Tensor(a) self) -> (Tensor(a))
aten::_conj(Tensor self) -> (Tensor)'''
pass
class ATenAbsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::abs(Tensor self) -> (Tensor)'''
pass
class ATenCoshSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cosh(Tensor self) -> (Tensor)'''
pass
class ATenRealSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::real(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenGruCellSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::gru_cell(Tensor input, Tensor hx, Tensor w_ih, Tensor w_hh, Tensor? b_ih=None, Tensor? b_hh=None) -> (Tensor)'''
pass
class ATenSizeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::size(Tensor self) -> (int[])'''
pass
class ATenNllLoss2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::nll_loss2d(Tensor self, Tensor target, Tensor? weight=None, int reduction=1, int ignore_index=-100) -> (Tensor)'''
pass
class ATenFrobeniusNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::frobenius_norm(Tensor self) -> (Tensor)
aten::frobenius_norm.dim(Tensor self, int[1] dim, bool keepdim=False) -> (Tensor)'''
pass
class ATenConvolutionNogroupSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_convolution_nogroup(Tensor input, Tensor weight, Tensor? bias, int[] stride, int[] padding, int[] dilation, bool transposed, int[] output_padding) -> (Tensor)'''
pass
class ATenArccosSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::arccos(Tensor self) -> (Tensor)'''
pass
class ATenContiguousSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::contiguous(Tensor(a) self, *, int memory_format=0) -> (Tensor(a))'''
pass
class ATenUnbindSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unbind.int(Tensor(a) self, int dim=0) -> (Tensor[])
aten::unbind.Dimname(Tensor(a) self, str dim) -> (Tensor[])'''
pass
class ATenCummaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::cummax(Tensor self, int dim) -> (Tensor values, Tensor indices)
aten::cummax.dimname(Tensor self, str dim) -> (Tensor values, Tensor indices)
aten::cummax.out(Tensor self, int dim, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenLinalgMatrixNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_matrix_norm(Tensor self, Scalar ord, int[] dim=[-2, -1], bool keepdim=False, *, int? dtype=None) -> (Tensor)
aten::linalg_matrix_norm.str_ord(Tensor self, str ord="fro", int[] dim=[-2, -1], bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenComputeLinearCombinationSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_compute_linear_combination(Tensor input, Tensor coefficients) -> (Tensor)'''
pass
class ATenTSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::t(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenClipSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::clip(Tensor self, Scalar? min=None, Scalar? max=None) -> (Tensor)
aten::clip.Tensor(Tensor self, Tensor? min=None, Tensor? max=None) -> (Tensor)'''
pass
class ATenStdSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::std(Tensor self, bool unbiased=True) -> (Tensor)
aten::std.dim(Tensor self, int[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor)
aten::std.names_dim(Tensor self, str[1] dim, bool unbiased=True, bool keepdim=False) -> (Tensor)
aten::std.correction(Tensor self, int[1]? dim, *, int? correction, bool keepdim=False) -> (Tensor)
aten::std.correction_names(Tensor self, str[1] dim, *, int? correction, bool keepdim=False) -> (Tensor)'''
pass
class ATenSqueezeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::squeeze(Tensor(a) self) -> (Tensor(a))
aten::squeeze.dim(Tensor(a) self, int dim) -> (Tensor(a))
aten::squeeze.dimname(Tensor(a) self, str dim) -> (Tensor(a))'''
pass
class ATenReshapeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::reshape(Tensor(a) self, int[] shape) -> (Tensor(a))'''
pass
class ATenNcfUnsqueezeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_ncf_unsqueeze(Tensor(a) self, int ndim) -> (Tensor(a))'''
pass
class ATenIndexPutSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::index_put(Tensor self, Tensor?[] indices, Tensor values, bool accumulate=False) -> (Tensor)
aten::index_put.hacked_twin(Tensor self, Tensor[] indices, Tensor values, bool accumulate=False) -> (Tensor)'''
pass
class ATenBernoulliSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::bernoulli(Tensor self, *, Generator? generator=None) -> (Tensor)
aten::bernoulli.p(Tensor self, float p, *, Generator? generator=None) -> (Tensor)'''
pass
class ATenBaddbmmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::baddbmm(Tensor self, Tensor batch1, Tensor batch2, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenPermuteSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::permute(Tensor(a) self, int[] dims) -> (Tensor(a))'''
pass
class ATenNumpyTSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::numpy_T(Tensor(a) self) -> (Tensor(a))'''
pass
class ATenRad2degSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rad2deg(Tensor self) -> (Tensor)'''
pass
class ATenQuantizedMaxPool2dSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_max_pool2d(Tensor self, int[2] kernel_size, int[2] stride=[], int[2] padding=[0, 0], int[2] dilation=[1, 1], bool ceil_mode=False) -> (Tensor)'''
pass
class ATenAddSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::add.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> (Tensor)
aten::add.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> (Tensor)'''
pass
class ATenRandnLikeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::randn_like(Tensor self, *, int? dtype=None, int? layout=None, Device? device=None, bool? pin_memory=None, int? memory_format=None) -> (Tensor)'''
pass
class ATenIntReprSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::int_repr(Tensor self) -> (Tensor)'''
pass
class ATenAddmvSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addmv(Tensor self, Tensor mat, Tensor vec, *, Scalar beta=1, Scalar alpha=1) -> (Tensor)'''
pass
class ATenQPerChannelScalesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::q_per_channel_scales(Tensor self) -> (Tensor)'''
pass
class ATenAddcdivSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::addcdiv(Tensor self, Tensor tensor1, Tensor tensor2, *, Scalar value=1) -> (Tensor)'''
pass
class ATenSplitSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::split.Tensor(Tensor(a) self, int split_size, int dim=0) -> (Tensor[])
aten::split(Tensor self, int[] split_sizes, int dim=0) -> (Tensor[])'''
pass
class ATenNarrowSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::narrow(Tensor(a) self, int dim, int start, int length) -> (Tensor(a))
aten::narrow.Tensor(Tensor(a) self, int dim, Tensor start, int length) -> (Tensor(a))'''
pass
class ATenMovedimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::movedim.intlist(Tensor(a) self, int[] source, int[] destination) -> (Tensor(a))
aten::movedim.int(Tensor(a) self, int source, int destination) -> (Tensor(a))'''
pass
class ATenAsStridedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::as_strided(Tensor(a) self, int[] size, int[] stride, int? storage_offset=None) -> (Tensor(a))'''
pass
class ATenReluSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::relu(Tensor self) -> (Tensor)'''
pass
class ATenRemoveBatchDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_remove_batch_dim(Tensor self, int level, int batch_size, int out_dim) -> (Tensor)'''
pass
class ATenSearchsortedSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::searchsorted.Tensor(Tensor sorted_sequence, Tensor self, *, bool out_int32=False, bool right=False) -> (Tensor)
aten::searchsorted.Scalar(Tensor sorted_sequence, Scalar self, *, bool out_int32=False, bool right=False) -> (Tensor)'''
pass
class ATenSigmoidSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sigmoid(Tensor self) -> (Tensor)'''
pass
class ATenDiagonalSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::diagonal(Tensor(a) self, int offset=0, int dim1=0, int dim2=1) -> (Tensor(a))
aten::diagonal.Dimname(Tensor(a) self, *, str outdim, str dim1, str dim2, int offset=0) -> (Tensor(a))'''
pass
class ATenSplitWithSizesSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::split_with_sizes(Tensor(a) self, int[] split_sizes, int dim=0) -> (Tensor[])'''
pass
class ATenMaximumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::maximum(Tensor self, Tensor other) -> (Tensor)'''
pass
class ATenUnfoldSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unfold(Tensor(a) self, int dimension, int size, int step) -> (Tensor(a))'''
pass
class ATenErfcSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::erfc(Tensor self) -> (Tensor)'''
pass
class ATenDigammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::digamma(Tensor self) -> (Tensor)'''
pass
class ATenQuantizedGruSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_gru.input(Tensor input, Tensor hx, __torch__.torch.classes.rnn.CellParamsBase[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::quantized_gru.data(Tensor data, Tensor batch_sizes, Tensor hx, __torch__.torch.classes.rnn.CellParamsBase[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)
aten::quantized_gru.input_legacy(Tensor input, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first) -> (Tensor, Tensor)
aten::quantized_gru.data_legacy(Tensor data, Tensor batch_sizes, Tensor hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional) -> (Tensor, Tensor)'''
pass
class ATenLinalgVectorNormSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_vector_norm(Tensor self, Scalar ord=2, int[1]? dim=None, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenAminmaxSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_aminmax(Tensor self) -> (Tensor, Tensor)
aten::_aminmax.dim(Tensor self, int dim, bool keepdim=False) -> (Tensor, Tensor)'''
pass
class ATenSumSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::sum.dim_IntList(Tensor self, int[1] dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)
aten::sum(Tensor self, *, int? dtype=None) -> (Tensor)
aten::sum.dim_DimnameList(Tensor self, str[1] dim, bool keepdim=False, *, int? dtype=None) -> (Tensor)'''
pass
class ATenAddBatchDimSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_add_batch_dim(Tensor self, int batch_dim, int level) -> (Tensor)'''
pass
class ATenUpsampleNearestSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::__upsample_nearest(Tensor input, int? size=None, int? scale_factor=None) -> (Tensor)
aten::__upsample_nearest.size_list(Tensor input, int[]? size=None, int? scale_factor=None) -> (Tensor)'''
pass
class ATenExpandAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::expand_as(Tensor(a) self, Tensor other) -> (Tensor(a))'''
pass
class ATenModeSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mode(Tensor self, int dim=-1, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::mode.dimname(Tensor self, str dim, bool keepdim=False) -> (Tensor values, Tensor indices)
aten::mode.values(Tensor self, int dim=-1, bool keepdim=False, *, Tensor(a!) values, Tensor(b!) indices) -> (Tensor(a!) values, Tensor(b!) indices)'''
pass
class ATenUnsafeChunkSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::unsafe_chunk(Tensor self, int chunks, int dim=0) -> (Tensor[])'''
pass
class ATenSelectSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::select.int(Tensor(a) self, int dim, int index) -> (Tensor(a))
aten::select.Dimname(Tensor(a) self, str dim, int index) -> (Tensor(a))'''
pass
class ATenLinalgMatrixPowerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::linalg_matrix_power(Tensor self, int n) -> (Tensor)'''
pass
class ATenInverseHelperSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_inverse_helper(Tensor self) -> (Tensor)'''
pass
class ATenRsubSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::rsub.Tensor(Tensor self, Tensor other, *, Scalar alpha=1) -> (Tensor)
aten::rsub.Scalar(Tensor self, Scalar other, Scalar alpha=1) -> (Tensor)'''
pass
class ATenQuantizedLstmSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::quantized_lstm.input(Tensor input, Tensor[] hx, __torch__.torch.classes.rnn.CellParamsBase[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first, *, int? dtype=None, bool use_dynamic=False) -> (Tensor, Tensor, Tensor)
aten::quantized_lstm.data(Tensor data, Tensor batch_sizes, Tensor[] hx, __torch__.torch.classes.rnn.CellParamsBase[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, *, int? dtype=None, bool use_dynamic=False) -> (Tensor, Tensor, Tensor)
aten::quantized_lstm.input_legacy(Tensor input, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, bool batch_first, *, int? dtype=None, bool use_dynamic=False) -> (Tensor, Tensor, Tensor)
aten::quantized_lstm.data_legacy(Tensor data, Tensor batch_sizes, Tensor[] hx, Tensor[] params, bool has_biases, int num_layers, float dropout, bool train, bool bidirectional, *, int? dtype=None, bool use_dynamic=False) -> (Tensor, Tensor, Tensor)'''
pass
class ATenFwPrimalSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::_fw_primal(Tensor(a) self, int level) -> (Tensor(a))'''
pass
class ATenMishSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::mish(Tensor self) -> (Tensor)'''
pass
class ATenReshapeAsSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::reshape_as(Tensor(a) self, Tensor other) -> (Tensor(a))'''
pass
class ATenTanhSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::tanh(Tensor self) -> (Tensor)'''
pass
class ATenFakeQuantizePerTensorAffineCachemaskSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::fake_quantize_per_tensor_affine_cachemask(Tensor self, float scale, int zero_point, int quant_min, int quant_max) -> (Tensor output, Tensor mask)'''
pass
class ATenLgammaSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::lgamma(Tensor self) -> (Tensor)'''
pass
class ATenHingeEmbeddingLossSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::hinge_embedding_loss(Tensor self, Tensor target, float margin=1., int reduction=1) -> (Tensor)'''
pass
class ATenMatrixPowerSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::matrix_power(Tensor self, int n) -> (Tensor)'''
pass
class ATenErfSchema(OperatorConverter):
@abstractmethod
def parse(self, node, attrs, args, graph_converter):
'''aten::erf(Tensor self) -> (Tensor)'''
pass
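# --- Illustrative sketch (not part of the generated schema stubs above) ---
# Each class above is an abstract stub whose docstring records the aten
# operator signature(s) a concrete converter must handle. A hedged example
# of what a concrete subclass might look like is shown below; the body is a
# placeholder, since the real graph_converter API is not visible here.
class ExampleReluConverter(ATenReluSchema):
    def parse(self, node, attrs, args, graph_converter):
        # aten::relu(Tensor self) -> (Tensor)
        # A real implementation would read the input tensor from the parsed
        # node/args and register the matching backend op on graph_converter.
        raise NotImplementedError('illustrative sketch only')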
| 3 |
1c9c1a6f0b22d6fef4587a04d7eea1c516dc439b
|
Python
|
from __future__ import division
import sys
import numpy as np
import matplotlib.pyplot as plt
from sklearn import preprocessing
from sklearn import svm
import re
dataVectors = []
#the file train.csv is expected
file = open('train.csv','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
#find the attribute names
attributes = dataVectors[0]
dataVectors = dataVectors[1:]
data=np.array(np.genfromtxt('train.csv',dtype=('S32','S32','S32','S32','S32','S32','S32',int,'S32','S32'),delimiter=',',names=True))
data.shape
#let's first convert all ages into days
#this code was meant to convert all age data into days; we found out that was not going to work
#dateByDaysVec = []
#for i in range(len(dataVectors)):
# if "year" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(365*num[0])
# elif "month" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(30*num[0])
# elif "week" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(7*num[0])
# elif "day" in dataVectors[i][7]:
# num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
# dateByDaysVec.append(num[0])
# else:
# dateByDaysVec.append(0)
yearsAlive = []
#assign number based on year
#less than a year 0
#every year after is another int
#convert all age data into yearly ints
for i in range(len(dataVectors)):
if "year" in dataVectors[i][7]:
num = [int(s) for s in dataVectors[i][7].split() if s.isdigit()]
yearsAlive.append(num[0])
data['AgeuponOutcome'][i] = num[0]
else:
yearsAlive.append(0)
data['AgeuponOutcome'][i] = 0
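# --- Illustrative sketch (added for clarity; not in the original script) ---
# The loop above only recognizes ages given in years and maps everything
# else to 0. A hedged helper in the spirit of the abandoned days-based
# conversion, handling months/weeks/days as well, might look like this:
def age_to_years(age_str):
    """Convert an AgeuponOutcome string like '2 years' or '3 weeks' to a
    whole number of years (anything under a year rounds down to 0)."""
    nums = [int(s) for s in age_str.split() if s.isdigit()]
    if not nums:
        return 0
    n = nums[0]
    if 'year' in age_str:
        return n
    if 'month' in age_str:
        return n // 12
    if 'week' in age_str:
        return n // 52
    if 'day' in age_str:
        return n // 365
    return 0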
#used to show the age data skew; uncomment to see
#plt.hist(data['AgeuponOutcome'],4)
#plt.show()
#separate age data into 3 distinct categories
idx_age_0=data['AgeuponOutcome']<5
idx_age_1=(data['AgeuponOutcome']>=5) & (data['AgeuponOutcome']<10)
idx_age_2=data['AgeuponOutcome']>=10
#save new data and reopen data with years now as strings instead of ints
np.savetxt('filterPass1.txt',data,fmt="%s",delimiter=',')
data=np.array(np.genfromtxt('filterPass1.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
dataLen = len(dataVectors)
dataVectors = []
file = open('filterPass1.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
dataLen2 = len(dataVectors)
#save new year data as easy-to-read strings
data['AgeuponOutcome'][idx_age_0]='<5years'
data['AgeuponOutcome'][idx_age_1]='>=5and<10years'
data['AgeuponOutcome'][idx_age_2]='>=10years'
#so separating the animals into five-year bins could work
#now we have defined two different ways to look at the amount of time the pets have been alive
#decide later what is more appropriate
#next step is to take the animals with no names and assign them "NoName"
#I will also keep track of unnamed pets vs named
listOfAnimalNames = []
unnamedVsNamed = []
for i in range(len(dataVectors)):
if dataVectors[i][1] != '':
listOfAnimalNames.append(dataVectors[i][1])
unnamedVsNamed.append('Named')
else:
listOfAnimalNames.append('NoName')
unnamedVsNamed.append('NoName')
idx_name_0 = data['Name'] != ''
idx_name_1 = data['Name'] == ''
data['Name'][idx_name_0] = "Named"
data['Name'][idx_name_1] = "NoName"
#now that names are taken care of we need to handle the DateTime data
listOfSeasons = []
listOfTimeOfDays = []
#use a simple regular expression to grab distinct parts of the date data
for i in range(len(dataVectors)):
getMonthAndTime = re.findall('\d+-(\d+)-\d+ (\d+):\d+:\d+',dataVectors[i][2])
month = int(getMonthAndTime[0][0])
time = int(getMonthAndTime[0][1])
season = ''
timeOfDay = ''
if month >= 3 and month <= 5:
season = 'Spring'
if month >= 6 and month <= 8:
season = 'Summer'
if month >= 9 and month <= 11:
season = 'Fall'
if month == 12:
season = 'Winter'
if month >= 1 and month <= 2:
season = 'Winter'
if time >= 1 and time <= 6:
timeOfDay = 'Morning'
if time >= 7 and time <= 12:
timeOfDay = 'Morning'
if time >= 13 and time <= 18:
timeOfDay = 'Afternoon'
if time >= 19 and time <= 23:
timeOfDay = 'Night'
if time == 0:
timeOfDay = 'Night'
listOfSeasons.append(season)
listOfTimeOfDays.append(timeOfDay)
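# --- Illustrative sketch (added for clarity; not in the original script) ---
# The regex above works, but the same month/hour extraction can be done
# with the standard library. A hedged alternative, assuming the DateTime
# column uses the 'YYYY-MM-DD HH:MM:SS' format the regex matches:
from datetime import datetime

def season_and_time_of_day(datetime_str):
    dt = datetime.strptime(datetime_str, '%Y-%m-%d %H:%M:%S')
    seasons = {12: 'Winter', 1: 'Winter', 2: 'Winter',
               3: 'Spring', 4: 'Spring', 5: 'Spring',
               6: 'Summer', 7: 'Summer', 8: 'Summer',
               9: 'Fall', 10: 'Fall', 11: 'Fall'}
    # mirror the hour buckets used above: 1-12 Morning, 13-18 Afternoon,
    # 19-23 and 0 Night
    if 1 <= dt.hour <= 12:
        time_of_day = 'Morning'
    elif 13 <= dt.hour <= 18:
        time_of_day = 'Afternoon'
    else:
        time_of_day = 'Night'
    return seasons[dt.month], time_of_day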
#save new data with name modified
np.savetxt('filterPass2.txt',data,fmt="%s",delimiter=',')
dataVectors = []
file = open('filterPass2.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
dataLen3 = len(dataVectors)
#get rid of animalID and datetime and add timeOfDay and Seasons
for i in range(dataLen3):
dataVectors[i].pop(2)
dataVectors[i].pop(0)
dataVectors[i].insert(1, listOfSeasons[i])
dataVectors[i].insert(2, listOfTimeOfDays[i])
#save data with new timeOfDay and Seasons attributes
data2 = np.array(dataVectors)
np.savetxt('filterPass3.txt',data2,fmt="%s",delimiter=',')
#generate new data array
data=np.array(np.genfromtxt('filterPass3.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
dataVectors = []
file = open('filterPass3.txt','r')
for line in file:
dataVectors.append(line.strip().split(','))
file.close()
isMixOrNot = []
#determine if an animal is a mix or not
for i in range(len(dataVectors)):
if 'Mix' in data[i][8]:
isMixOrNot.append('Mix')
else:
isMixOrNot.append('Purebred')
for i in range(len(dataVectors)):
data[i][8] = isMixOrNot[i]
#np.savetxt('filterPass4.txt',data,fmt="%s",delimiter=',')
#data=np.array(np.genfromtxt('filterPass4.txt',dtype=('S32','S32','S32','S32','S32','S32','S32','S32','S32','S32'),delimiter=',',names=attributes))
#dataVectors = []
#file = open('filterPass4.txt','r')
#for line in file:
# dataVectors.append(line.strip().split(','))
#file.close
mixedColorOrNot = []
for i in range(len(dataVectors)):
if '/' in data[i][9]:
mixedColorOrNot.append('MixedColor')
else:
mixedColorOrNot.append('SolidColor')
for i in range(len(dataVectors)):
data[i][9] = mixedColorOrNot[i]
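# --- Illustrative sketch (added for clarity; not in the original script) ---
# Both loops above are simple per-row string tests. A hedged helper that
# performs the same two transforms in one pass (assuming column 8 is Breed
# and column 9 is Color, as indexed above):
def classify_breed_and_color(rows):
    """Return (mix_flags, color_flags) for rows shaped like dataVectors."""
    mix_flags = ['Mix' if 'Mix' in row[8] else 'Purebred' for row in rows]
    color_flags = ['MixedColor' if '/' in row[9] else 'SolidColor' for row in rows]
    return mix_flags, color_flags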
#get rid of the rest of the whitespace in the data so it can be used with Association Rules
idx_subtype_0 = data['OutcomeSubtype'] == ''
idx_subtype_1 = data['OutcomeSubtype'] == 'At Vet'
idx_subtype_2 = data['OutcomeSubtype'] == 'Foster'
idx_subtype_3 = data['OutcomeSubtype'] == 'In Foster'
idx_subtype_4 = data['OutcomeSubtype'] == 'In Kennel'
idx_subtype_5 = data['OutcomeSubtype'] == 'In Surgery'
idx_subtype_6 = data['OutcomeSubtype'] == 'Rabies Risk'
data['OutcomeSubtype'][idx_subtype_0] = "NoSubtype"
data['OutcomeSubtype'][idx_subtype_1] = "AtVet"
data['OutcomeSubtype'][idx_subtype_2] = "Foster"
data['OutcomeSubtype'][idx_subtype_3] = "Foster"
data['OutcomeSubtype'][idx_subtype_4] = "Kennel"
data['OutcomeSubtype'][idx_subtype_5] = "Surgery"
data['OutcomeSubtype'][idx_subtype_6] = "RabiesRisk"
idx_sex_0 = data['SexuponOutcome'] == ''
idx_sex_1 = data['SexuponOutcome'] == 'Intact Male'
idx_sex_2 = data['SexuponOutcome'] == 'Intact Female'
idx_sex_3 = data['SexuponOutcome'] == 'Spayed Female'
idx_sex_4 = data['SexuponOutcome'] == 'Neutered Male'
data['SexuponOutcome'][idx_sex_1] = "IntactMale"
data['SexuponOutcome'][idx_sex_2] = "IntactFemale"
data['SexuponOutcome'][idx_sex_3] = "SpayedFemale"
data['SexuponOutcome'][idx_sex_4] = "NeuteredMale"
data['SexuponOutcome'][idx_sex_0] = "Unknown"
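# --- Illustrative sketch (added for clarity; not in the original script) ---
# The boolean-mask replacements above can also be written as one loop over
# a mapping; a hedged equivalent of the subtype/sex cleanup:
def relabel(column, mapping):
    """Replace each old label with its new label, in place, on a
    structured-array column (new labels must not collide with old keys)."""
    for old, new in mapping.items():
        column[column == old] = new
# e.g.:
# relabel(data['SexuponOutcome'],
#         {'': 'Unknown', 'Intact Male': 'IntactMale',
#          'Intact Female': 'IntactFemale', 'Spayed Female': 'SpayedFemale',
#          'Neutered Male': 'NeuteredMale'})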
np.savetxt('filterPass4.txt',data,fmt="%s",delimiter=',')
#dataVectors = []
#file = open('filterPass5.txt','r')
#for line in file:
# dataVectors.append(line.strip().split(','))
#file.close()
#newData = np.array(dataVectors)
#np.savetxt('filterPass6.txt',newData,fmt="%s",delimiter=',')
#listOfUniqueElements = [[] for i in range(10)]
#for i in range(len(dataVectors)):
# for k in range(len(dataVectors[i])):
# if dataVectors[i][k] not in listOfUniqueElements[k]:
# listOfUniqueElements[k].append(dataVectors[i][k])
#listOfNumericalElements = [[] for i in range(10)]
#for i in range(len(dataVectors)):
# for k in range(len(dataVectors[i])):
# listOfNumericalElements[k].append(listOfUniqueElements[k].index(dataVectors[i][k]))
#dataVectorsTest = []
#file = open('filterPass6.txt','r')
#for line in file:
# dataVectorsTest.append(line.strip().split(','))
#file.close()
#listOfNumericalElementsTest = [[] for i in range(10)]
#for i in range(len(dataVectorsTest)):
# for k in range(len(dataVectorsTest[i])):
# listOfNumericalElementsTest[k].append(listOfUniqueElements[k].index(dataVectorsTest[i][k]))
#f = open('numericalDataTrain.txt', 'w')
#for i in range(len(listOfNumericalElements[0])):
# for k in range(len(listOfNumericalElements)):
# f.write(str(listOfNumericalElements[k][i]))
# if k != len(listOfNumericalElements) - 1:
# f.write(',')
# f.write('\n')
#f.close()
#f = open('numericalDataTest.txt', 'w')
#for i in range(len(listOfNumericalElementsTest[0])):
# for k in range(len(listOfNumericalElementsTest)):
# f.write(str(listOfNumericalElementsTest[k][i]))
# if k != len(listOfNumericalElementsTest) - 1:
# f.write(',')
# f.write('\n')
#f.close()
#everything below this point is the code that produced the bar graphs shown in the presentation
#it contains a lot of tedious, copy-pasted probability calculation
#all of the code is preserved below; just uncomment it if you wish to run it yourself
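# --- Illustrative sketch (added for clarity; not in the original script) ---
# The copy-pasted tallies below can be collapsed into one generic helper.
# A hedged version, assuming the column layout built above (column 5 is
# AnimalType, column 3 is OutcomeType):
def outcome_percentages(data, animal, column, value):
    """Percentage of each OutcomeType among rows of the given AnimalType
    where row[column] == value, in the order used by the bar charts below."""
    outcomes = ['Adoption', 'Died', 'Transfer', 'Return_to_owner', 'Euthanasia']
    rows = [r for r in data if r[5] == animal and r[column] == value]
    total = float(len(rows)) if len(rows) else 1.0  # avoid division by zero
    return [100.0 * sum(1 for r in rows if r[3] == o) / total for o in outcomes]
# e.g. outcome_percentages(data, 'Dog', 8, 'Mix') reproduces the
# percentagemixDogsOutcomes list computed by hand below.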
#mixDogsAdopted = 0
#mixDogsDied = 0
#mixDogsTransfered = 0
#mixDogsReturnedToOwners = 0
#mixDogsEuthanized = 0
#purebredDogsAdopted = 0
#purebredDogsDied = 0
#purebredDogsTransfered = 0
#purebredDogsReturnedToOwners = 0
#purebredDogsEuthanized = 0
#mixCatsAdopted = 0
#mixCatsDied = 0
#mixCatsTransfered = 0
#mixCatsReturnedToOwners = 0
#mixCatsEuthanized = 0
#purebredCatsAdopted = 0
#purebredCatsDied = 0
#purebredCatsTransfered = 0
#purebredCatsReturnedToOwners = 0
#purebredCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][8] == 'Mix':
# mixDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][8] == 'Mix':
# mixDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][8] == 'Mix':
# mixDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Mix':
# mixDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][8] == 'Mix':
# mixDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][8] == 'Purebred':
# purebredDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][8] == 'Purebred':
# purebredDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][8] == 'Purebred':
# purebredDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Purebred':
# purebredDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][8] == 'Purebred':
# purebredDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][8] == 'Mix':
# mixCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][8] == 'Mix':
# mixCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][8] == 'Mix':
# mixCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Mix':
# mixCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][8] == 'Mix':
# mixCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][8] == 'Purebred':
# purebredCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][8] == 'Purebred':
# purebredCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][8] == 'Purebred':
# purebredCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][8] == 'Purebred':
# purebredCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][8] == 'Purebred':
# purebredCatsEuthanized += 1
#nummixDogs = 0
#numpurebredDogs = 0
#nummixCats = 0
#numpurebredCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][8] == 'Mix':
# nummixDogs += 1
# if data[i][5] == 'Dog' and data[i][8] == 'Purebred':
# numpurebredDogs += 1
# if data[i][5] == 'Cat' and data[i][8] == 'Mix':
# nummixCats += 1
# if data[i][5] == 'Cat' and data[i][8] == 'Purebred':
# numpurebredCats += 1
#percentagemixDogsAdopted = mixDogsAdopted/nummixDogs*100
#percentagemixDogsDied = mixDogsDied/nummixDogs*100
#percentagemixDogsTransfered = mixDogsTransfered/nummixDogs*100
#percentagemixDogsReturnToOwners = mixDogsReturnedToOwners/nummixDogs*100
#percentagemixDogsEuthanized = mixDogsEuthanized/nummixDogs*100
#percentagemixDogsOutcomes = [percentagemixDogsAdopted, percentagemixDogsDied, percentagemixDogsTransfered, percentagemixDogsReturnToOwners, percentagemixDogsEuthanized]
#percentagepurebredDogsAdopted = purebredDogsAdopted/numpurebredDogs*100
#percentagepurebredDogsDied = purebredDogsDied/numpurebredDogs*100
#percentagepurebredDogsTransfered = purebredDogsTransfered/numpurebredDogs*100
#percentagepurebredDogsReturnToOwners = purebredDogsReturnedToOwners/numpurebredDogs*100
#percentagepurebredDogsEuthanized = purebredDogsEuthanized/numpurebredDogs*100
#percentagepurebredDogsOutcomes = [percentagepurebredDogsAdopted, percentagepurebredDogsDied, percentagepurebredDogsTransfered, percentagepurebredDogsReturnToOwners, percentagepurebredDogsEuthanized]
#percentagemixCatsAdopted = mixCatsAdopted/nummixCats*100
#percentagemixCatsDied = mixCatsDied/nummixCats*100
#percentagemixCatsTransfered = mixCatsTransfered/nummixCats*100
#percentagemixCatsReturnToOwners = mixCatsReturnedToOwners/nummixCats*100
#percentagemixCatsEuthanized = mixCatsEuthanized/nummixCats*100
#percentagemixCatsOutcomes = [percentagemixCatsAdopted, percentagemixCatsDied, percentagemixCatsTransfered, percentagemixCatsReturnToOwners, percentagemixCatsEuthanized]
#percentagepurebredCatsAdopted = purebredCatsAdopted/numpurebredCats*100
#percentagepurebredCatsDied = purebredCatsDied/numpurebredCats*100
#percentagepurebredCatsTransfered = purebredCatsTransfered/numpurebredCats*100
#percentagepurebredCatsReturnToOwners = purebredCatsReturnedToOwners/numpurebredCats*100
#percentagepurebredCatsEuthanized = purebredCatsEuthanized/numpurebredCats*100
#percentagepurebredCatsOutcomes = [percentagepurebredCatsAdopted, percentagepurebredCatsDied, percentagepurebredCatsTransfered, percentagepurebredCatsReturnToOwners, percentagepurebredCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagemixDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagepurebredDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Purebred Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagemixCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentagepurebredCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Purebred Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#youngDogsAdopted = 0
#youngDogsDied = 0
#youngDogsTransfered = 0
#youngDogsReturnedToOwners = 0
#youngDogsEuthanized = 0
#middleAgedDogsAdopted = 0
#middleAgedDogsDied = 0
#middleAgedDogsTransfered = 0
#middleAgedDogsReturnedToOwners = 0
#middleAgedDogsEuthanized = 0
#oldDogsAdopted = 0
#oldDogsDied = 0
#oldDogsTransfered = 0
#oldDogsReturnedToOwners = 0
#oldDogsEuthanized = 0
#######################################
#youngCatsAdopted = 0
#youngCatsDied = 0
#youngCatsTransfered = 0
#youngCatsReturnedToOwners = 0
#youngCatsEuthanized = 0
#middleAgedCatsAdopted = 0
#middleAgedCatsDied = 0
#middleAgedCatsTransfered = 0
#middleAgedCatsReturnedToOwners = 0
#middleAgedCatsEuthanized = 0
#oldCatsAdopted = 0
#oldCatsDied = 0
#oldCatsTransfered = 0
#oldCatsReturnedToOwners = 0
#oldCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '<5years':
# youngDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '<5years':
# youngDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '<5years':
# youngDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '<5years':
# youngDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '<5years':
# youngDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '>=5and<10years':
# middleAgedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '>=5and<10years':
# middleAgedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '>=5and<10years':
# middleAgedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=5and<10years':
# middleAgedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '>=5and<10years':
# middleAgedDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][7] == '>=10years':
# oldDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][7] == '>=10years':
# oldDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][7] == '>=10years':
# oldDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=10years':
# oldDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][7] == '>=10years':
# oldDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '<5years':
# youngCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '<5years':
# youngCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '<5years':
# youngCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '<5years':
# youngCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '<5years':
# youngCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '>=5and<10years':
# middleAgedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '>=5and<10years':
# middleAgedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '>=5and<10years':
# middleAgedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=5and<10years':
# middleAgedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '>=5and<10years':
# middleAgedCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][7] == '>=10years':
# oldCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][7] == '>=10years':
# oldCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][7] == '>=10years':
# oldCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][7] == '>=10years':
# oldCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][7] == '>=10years':
# oldCatsEuthanized += 1
#numOfDogs = np.sum(data['AnimalType'] == 'Dog')
#numOfCats = np.sum(data['AnimalType'] == 'Cat')
#numAdopted = np.sum(data['OutcomeType'] == 'Adoption')
#numDied = np.sum(data['OutcomeType'] == 'Died')
#numEuthanized = np.sum(data['OutcomeType'] == 'Euthanasia')
#numTransfered = np.sum(data['OutcomeType'] == 'Transfer')
#numReturned = np.sum(data['OutcomeType'] == 'Return_to_owner')
#numYoungDogs = 0
#numMiddleDogs = 0
#numOldDogs = 0
#numYoungCats = 0
#numMiddleCats = 0
#numOldCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][7] == '<5years':
# numYoungDogs += 1
# if data[i][5] == 'Dog' and data[i][7] == '>=5and<10years':
# numMiddleDogs += 1
# if data[i][5] == 'Dog' and data[i][7] == '>=10years':
# numOldDogs += 1
# if data[i][5] == 'Cat' and data[i][7] == '<5years':
# numYoungCats += 1
# if data[i][5] == 'Cat' and data[i][7] == '>=5and<10years':
# numMiddleCats += 1
# if data[i][5] == 'Cat' and data[i][7] == '>=10years':
# numOldCats += 1
#percentageYoungDogsAdopted = youngDogsAdopted/numYoungDogs*100
#percentageYoungDogsDied = youngDogsDied/numYoungDogs*100
#percentageYoungDogsTransfered = youngDogsTransfered/numYoungDogs*100
#percentageYoungDogsReturnToOwners = youngDogsReturnedToOwners/numYoungDogs*100
#percentageYoungDogsEuthanized = youngDogsEuthanized/numYoungDogs*100
#percentageYoungDogsOutcomes = [percentageYoungDogsAdopted, percentageYoungDogsDied, percentageYoungDogsTransfered, percentageYoungDogsReturnToOwners, percentageYoungDogsEuthanized]
#percentageMiddleDogsAdopted = middleAgedDogsAdopted/numMiddleDogs*100
#percentageMiddleDogsDied = middleAgedDogsDied/numMiddleDogs*100
#percentageMiddleDogsTransfered = middleAgedDogsTransfered/numMiddleDogs*100
#percentageMiddleDogsReturnToOwners = middleAgedDogsReturnedToOwners/numMiddleDogs*100
#percentageMiddleDogsEuthanized = middleAgedDogsEuthanized/numMiddleDogs*100
#percentageMiddleDogsOutcomes = [percentageMiddleDogsAdopted, percentageMiddleDogsDied, percentageMiddleDogsTransfered, percentageMiddleDogsReturnToOwners, percentageMiddleDogsEuthanized]
#percentageOldDogsAdopted = oldDogsAdopted/numOldDogs*100
#percentageOldDogsDied = oldDogsDied/numOldDogs*100
#percentageOldDogsTransfered = oldDogsTransfered/numOldDogs*100
#percentageOldDogsReturnToOwners = oldDogsReturnedToOwners/numOldDogs*100
#percentageOldDogsEuthanized = oldDogsEuthanized/numOldDogs*100
#percentageOldDogsOutcomes = [percentageOldDogsAdopted, percentageOldDogsDied, percentageOldDogsTransfered, percentageOldDogsReturnToOwners, percentageOldDogsEuthanized]
#percentageYoungCatsAdopted = youngCatsAdopted/numYoungCats*100
#percentageYoungCatsDied = youngCatsDied/numYoungCats*100
#percentageYoungCatsTransfered = youngCatsTransfered/numYoungCats*100
#percentageYoungCatsReturnToOwners = youngCatsReturnedToOwners/numYoungCats*100
#percentageYoungCatsEuthanized = youngCatsEuthanized/numYoungCats*100
#percentageYoungCatsOutcomes = [percentageYoungCatsAdopted, percentageYoungCatsDied, percentageYoungCatsTransfered, percentageYoungCatsReturnToOwners, percentageYoungCatsEuthanized]
#percentageMiddleCatsAdopted = middleAgedCatsAdopted/numMiddleCats*100
#percentageMiddleCatsDied = middleAgedCatsDied/numMiddleCats*100
#percentageMiddleCatsTransfered = middleAgedCatsTransfered/numMiddleCats*100
#percentageMiddleCatsReturnToOwners = middleAgedCatsReturnedToOwners/numMiddleCats*100
#percentageMiddleCatsEuthanized = middleAgedCatsEuthanized/numMiddleCats*100
#percentageMiddleCatsOutcomes = [percentageMiddleCatsAdopted, percentageMiddleCatsDied, percentageMiddleCatsTransfered, percentageMiddleCatsReturnToOwners, percentageMiddleCatsEuthanized]
#percentageOldCatsAdopted = oldCatsAdopted/numOldCats*100
#percentageOldCatsDied = oldCatsDied/numOldCats*100
#percentageOldCatsTransfered = oldCatsTransfered/numOldCats*100
#percentageOldCatsReturnToOwners = oldCatsReturnedToOwners/numOldCats*100
#percentageOldCatsEuthanized = oldCatsEuthanized/numOldCats*100
#percentageOldCatsOutcomes = [percentageOldCatsAdopted, percentageOldCatsDied, percentageOldCatsTransfered, percentageOldCatsReturnToOwners, percentageOldCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageYoungDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Young Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMiddleDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Middle Aged Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageOldDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Old Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageYoungCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Young Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMiddleCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Middle Aged Cats Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageOldCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Old Cats Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#namedDogsAdopted = 0
#namedDogsDied = 0
#namedDogsTransfered = 0
#namedDogsReturnedToOwners = 0
#namedDogsEuthanized = 0
#unNamedDogsAdopted = 0
#unNamedDogsDied = 0
#unNamedDogsTransfered = 0
#unNamedDogsReturnedToOwners = 0
#unNamedDogsEuthanized = 0
#namedCatsAdopted = 0
#namedCatsDied = 0
#namedCatsTransfered = 0
#namedCatsReturnedToOwners = 0
#namedCatsEuthanized = 0
#unNamedCatsAdopted = 0
#unNamedCatsDied = 0
#unNamedCatsTransfered = 0
#unNamedCatsReturnedToOwners = 0
#unNamedCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][0] == 'Named':
# namedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][0] == 'Named':
# namedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][0] == 'Named':
# namedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][0] == 'Named':
# namedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][0] == 'Named':
# namedDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][0] == 'NoName':
# unNamedDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][0] == 'NoName':
# unNamedDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][0] == 'NoName':
# unNamedDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][0] == 'NoName':
# unNamedDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][0] == 'NoName':
# unNamedDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][0] == 'Named':
# namedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][0] == 'Named':
# namedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][0] == 'Named':
# namedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][0] == 'Named':
# namedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][0] == 'Named':
# namedCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][0] == 'NoName':
# unNamedCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][0] == 'NoName':
# unNamedCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][0] == 'NoName':
# unNamedCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][0] == 'NoName':
# unNamedCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][0] == 'NoName':
# unNamedCatsEuthanized += 1
#numNamedDogs = 0
#numUnNamedDogs = 0
#numNamedCats = 0
#numUnNamedCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][0] == 'Named':
# numNamedDogs += 1
# if data[i][5] == 'Dog' and data[i][0] == 'NoName':
# numUnNamedDogs += 1
# if data[i][5] == 'Cat' and data[i][0] == 'Named':
# numNamedCats += 1
# if data[i][5] == 'Cat' and data[i][0] == 'NoName':
# numUnNamedCats += 1
#percentageNamedDogsAdopted = namedDogsAdopted/numNamedDogs*100
#percentageNamedDogsDied = namedDogsDied/numNamedDogs*100
#percentageNamedDogsTransfered = namedDogsTransfered/numNamedDogs*100
#percentageNamedDogsReturnToOwners = namedDogsReturnedToOwners/numNamedDogs*100
#percentageNamedDogsEuthanized = namedDogsEuthanized/numNamedDogs*100
#percentageNamedDogsOutcomes = [percentageNamedDogsAdopted, percentageNamedDogsDied, percentageNamedDogsTransfered, percentageNamedDogsReturnToOwners, percentageNamedDogsEuthanized]
#percentageUnNamedDogsAdopted = unNamedDogsAdopted/numUnNamedDogs*100
#percentageUnNamedDogsDied = unNamedDogsDied/numUnNamedDogs*100
#percentageUnNamedDogsTransfered = unNamedDogsTransfered/numUnNamedDogs*100
#percentageUnNamedDogsReturnToOwners = unNamedDogsReturnedToOwners/numUnNamedDogs*100
#percentageUnNamedDogsEuthanized = unNamedDogsEuthanized/numUnNamedDogs*100
#percentageUnNamedDogsOutcomes = [percentageUnNamedDogsAdopted, percentageUnNamedDogsDied, percentageUnNamedDogsTransfered, percentageUnNamedDogsReturnToOwners, percentageUnNamedDogsEuthanized]
#percentageNamedCatsAdopted = namedCatsAdopted/numNamedCats*100
#percentageNamedCatsDied = namedCatsDied/numNamedCats*100
#percentageNamedCatsTransfered = namedCatsTransfered/numNamedCats*100
#percentageNamedCatsReturnToOwners = namedCatsReturnedToOwners/numNamedCats*100
#percentageNamedCatsEuthanized = namedCatsEuthanized/numNamedCats*100
#percentageNamedCatsOutcomes = [percentageNamedCatsAdopted, percentageNamedCatsDied, percentageNamedCatsTransfered, percentageNamedCatsReturnToOwners, percentageNamedCatsEuthanized]
#percentageUnNamedCatsAdopted = unNamedCatsAdopted/numUnNamedCats*100
#percentageUnNamedCatsDied = unNamedCatsDied/numUnNamedCats*100
#percentageUnNamedCatsTransfered = unNamedCatsTransfered/numUnNamedCats*100
#percentageUnNamedCatsReturnToOwners = unNamedCatsReturnedToOwners/numUnNamedCats*100
#percentageUnNamedCatsEuthanized = unNamedCatsEuthanized/numUnNamedCats*100
#percentageUnNamedCatsOutcomes = [percentageUnNamedCatsAdopted, percentageUnNamedCatsDied, percentageUnNamedCatsTransfered, percentageUnNamedCatsReturnToOwners, percentageUnNamedCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageNamedDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Named Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageUnNamedDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Un-Named Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageNamedCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Named Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageUnNamedCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Un-Named Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#SolidColorDogsAdopted = 0
#SolidColorDogsDied = 0
#SolidColorDogsTransfered = 0
#SolidColorDogsReturnedToOwners = 0
#SolidColorDogsEuthanized = 0
#MixedColorDogsAdopted = 0
#MixedColorDogsDied = 0
#MixedColorDogsTransfered = 0
#MixedColorDogsReturnedToOwners = 0
#MixedColorDogsEuthanized = 0
#SolidColorCatsAdopted = 0
#SolidColorCatsDied = 0
#SolidColorCatsTransfered = 0
#SolidColorCatsReturnedToOwners = 0
#SolidColorCatsEuthanized = 0
#MixedColorCatsAdopted = 0
#MixedColorCatsDied = 0
#MixedColorCatsTransfered = 0
#MixedColorCatsReturnedToOwners = 0
#MixedColorCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][9] == 'SolidColor':
# SolidColorDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][9] == 'SolidColor':
# SolidColorDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][9] == 'SolidColor':
# SolidColorDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][9] == 'SolidColor':
# SolidColorDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][9] == 'SolidColor':
# SolidColorDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][9] == 'MixedColor':
# MixedColorDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][9] == 'MixedColor':
# MixedColorDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][9] == 'MixedColor':
# MixedColorDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][9] == 'MixedColor':
# MixedColorDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][9] == 'MixedColor':
# MixedColorDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][9] == 'SolidColor':
# SolidColorCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][9] == 'SolidColor':
# SolidColorCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][9] == 'SolidColor':
# SolidColorCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][9] == 'SolidColor':
# SolidColorCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][9] == 'SolidColor':
# SolidColorCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][9] == 'MixedColor':
# MixedColorCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][9] == 'MixedColor':
# MixedColorCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][9] == 'MixedColor':
# MixedColorCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][9] == 'MixedColor':
# MixedColorCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][9] == 'MixedColor':
# MixedColorCatsEuthanized += 1
#numSolidColorDogs = 0
#numMixedColorDogs = 0
#numSolidColorCats = 0
#numMixedColorCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][9] == 'SolidColor':
# numSolidColorDogs += 1
# if data[i][5] == 'Dog' and data[i][9] == 'MixedColor':
# numMixedColorDogs += 1
# if data[i][5] == 'Cat' and data[i][9] == 'SolidColor':
# numSolidColorCats += 1
# if data[i][5] == 'Cat' and data[i][9] == 'MixedColor':
# numMixedColorCats += 1
#percentageSolidColorDogsAdopted = SolidColorDogsAdopted/numSolidColorDogs*100
#percentageSolidColorDogsDied = SolidColorDogsDied/numSolidColorDogs*100
#percentageSolidColorDogsTransfered = SolidColorDogsTransfered/numSolidColorDogs*100
#percentageSolidColorDogsReturnToOwners = SolidColorDogsReturnedToOwners/numSolidColorDogs*100
#percentageSolidColorDogsEuthanized = SolidColorDogsEuthanized/numSolidColorDogs*100
#percentageSolidColorDogsOutcomes = [percentageSolidColorDogsAdopted, percentageSolidColorDogsDied, percentageSolidColorDogsTransfered, percentageSolidColorDogsReturnToOwners, percentageSolidColorDogsEuthanized]
#percentageMixedColorDogsAdopted = MixedColorDogsAdopted/numMixedColorDogs*100
#percentageMixedColorDogsDied = MixedColorDogsDied/numMixedColorDogs*100
#percentageMixedColorDogsTransfered = MixedColorDogsTransfered/numMixedColorDogs*100
#percentageMixedColorDogsReturnToOwners = MixedColorDogsReturnedToOwners/numMixedColorDogs*100
#percentageMixedColorDogsEuthanized = MixedColorDogsEuthanized/numMixedColorDogs*100
#percentageMixedColorDogsOutcomes = [percentageMixedColorDogsAdopted, percentageMixedColorDogsDied, percentageMixedColorDogsTransfered, percentageMixedColorDogsReturnToOwners, percentageMixedColorDogsEuthanized]
#percentageSolidColorCatsAdopted = SolidColorCatsAdopted/numSolidColorCats*100
#percentageSolidColorCatsDied = SolidColorCatsDied/numSolidColorCats*100
#percentageSolidColorCatsTransfered = SolidColorCatsTransfered/numSolidColorCats*100
#percentageSolidColorCatsReturnToOwners = SolidColorCatsReturnedToOwners/numSolidColorCats*100
#percentageSolidColorCatsEuthanized = SolidColorCatsEuthanized/numSolidColorCats*100
#percentageSolidColorCatsOutcomes = [percentageSolidColorCatsAdopted, percentageSolidColorCatsDied, percentageSolidColorCatsTransfered, percentageSolidColorCatsReturnToOwners, percentageSolidColorCatsEuthanized]
#percentageMixedColorCatsAdopted = MixedColorCatsAdopted/numMixedColorCats*100
#percentageMixedColorCatsDied = MixedColorCatsDied/numMixedColorCats*100
#percentageMixedColorCatsTransfered = MixedColorCatsTransfered/numMixedColorCats*100
#percentageMixedColorCatsReturnToOwners = MixedColorCatsReturnedToOwners/numMixedColorCats*100
#percentageMixedColorCatsEuthanized = MixedColorCatsEuthanized/numMixedColorCats*100
#percentageMixedColorCatsOutcomes = [percentageMixedColorCatsAdopted, percentageMixedColorCatsDied, percentageMixedColorCatsTransfered, percentageMixedColorCatsReturnToOwners, percentageMixedColorCatsEuthanized]
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageSolidColorDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Solid Color Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMixedColorDogsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Color Dog Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageSolidColorCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Solid Color Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#fig, ax = plt.subplots()
#bars = ax.bar(np.arange(5), percentageMixedColorCatsOutcomes)
#ax.set_ylabel('Percentage')
#ax.set_title('Percentage Mixed Color Cat Outcomes')
#ax.set_ylim([0,100])
#ax.set_xticks(np.arange(5) + 0.42)
#ax.set_xticklabels(('Adopted','Died','Transfered','Returned', 'Euthanized'))
#plt.show()
#SpringDogsAdopted = 0
#SpringDogsDied = 0
#SpringDogsTransfered = 0
#SpringDogsReturnedToOwners = 0
#SpringDogsEuthanized = 0
#SummerDogsAdopted = 0
#SummerDogsDied = 0
#SummerDogsTransfered = 0
#SummerDogsReturnedToOwners = 0
#SummerDogsEuthanized = 0
#FallDogsAdopted = 0
#FallDogsDied = 0
#FallDogsTransfered = 0
#FallDogsReturnedToOwners = 0
#FallDogsEuthanized = 0
#WinterDogsAdopted = 0
#WinterDogsDied = 0
#WinterDogsTransfered = 0
#WinterDogsReturnedToOwners = 0
#WinterDogsEuthanized = 0
#SpringCatsAdopted = 0
#SpringCatsDied = 0
#SpringCatsTransfered = 0
#SpringCatsReturnedToOwners = 0
#SpringCatsEuthanized = 0
#SummerCatsAdopted = 0
#SummerCatsDied = 0
#SummerCatsTransfered = 0
#SummerCatsReturnedToOwners = 0
#SummerCatsEuthanized = 0
#FallCatsAdopted = 0
#FallCatsDied = 0
#FallCatsTransfered = 0
#FallCatsReturnedToOwners = 0
#FallCatsEuthanized = 0
#WinterCatsAdopted = 0
#WinterCatsDied = 0
#WinterCatsTransfered = 0
#WinterCatsReturnedToOwners = 0
#WinterCatsEuthanized = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][1] == 'Spring':
# SpringDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][1] == 'Spring':
# SpringDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][1] == 'Spring':
# SpringDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Spring':
# SpringDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][1] == 'Spring':
# SpringDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][1] == 'Summer':
# SummerDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][1] == 'Summer':
# SummerDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][1] == 'Summer':
# SummerDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Summer':
# SummerDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][1] == 'Summer':
# SummerDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][1] == 'Fall':
# FallDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][1] == 'Fall':
# FallDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][1] == 'Fall':
# FallDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Fall':
# FallDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][1] == 'Fall':
# FallDogsEuthanized += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Adoption' and data[i][1] == 'Winter':
# WinterDogsAdopted += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Died' and data[i][1] == 'Winter':
# WinterDogsDied += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Transfer' and data[i][1] == 'Winter':
# WinterDogsTransfered += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Winter':
# WinterDogsReturnedToOwners += 1
# if data[i][5] == 'Dog' and data[i][3] == 'Euthanasia' and data[i][1] == 'Winter':
# WinterDogsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][1] == 'Spring':
# SpringCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][1] == 'Spring':
# SpringCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][1] == 'Spring':
# SpringCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Spring':
# SpringCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][1] == 'Spring':
# SpringCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][1] == 'Summer':
# SummerCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][1] == 'Summer':
# SummerCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][1] == 'Summer':
# SummerCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Summer':
# SummerCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][1] == 'Summer':
# SummerCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][1] == 'Fall':
# FallCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][1] == 'Fall':
# FallCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][1] == 'Fall':
# FallCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Fall':
# FallCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][1] == 'Fall':
# FallCatsEuthanized += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Adoption' and data[i][1] == 'Winter':
# WinterCatsAdopted += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Died' and data[i][1] == 'Winter':
# WinterCatsDied += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Transfer' and data[i][1] == 'Winter':
# WinterCatsTransfered += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Return_to_owner' and data[i][1] == 'Winter':
# WinterCatsReturnedToOwners += 1
# if data[i][5] == 'Cat' and data[i][3] == 'Euthanasia' and data[i][1] == 'Winter':
# WinterCatsEuthanized += 1
#numSpringDogs = 0
#numSummerDogs = 0
#numFallDogs = 0
#numWinterDogs = 0
#numSpringCats = 0
#numSummerCats = 0
#numFallCats = 0
#numWinterCats = 0
#for i in range(len(dataVectors)):
# if data[i][5] == 'Dog' and data[i][1] == 'Spring':
# numSpringDogs += 1
# if data[i][5] == 'Dog' and data[i][1] == 'Summer':
# numSummerDogs += 1
# if data[i][5] == 'Dog' and data[i][1] == 'Fall':
# numFallDogs += 1
# if data[i][5] == 'Dog' and data[i][1] == 'Winter':
# numWinterDogs += 1
# if data[i][5] == 'Cat' and data[i][1] == 'Spring':
# numSpringCats += 1
# if data[i][5] == 'Cat' and data[i][1] == 'Summer':
# numSummerCats += 1
# if data[i][5] == 'Cat' and data[i][1] == 'Fall':
# numFallCats += 1
# if data[i][5] == 'Cat' and data[i][1] == 'Winter':
# numWinterCats += 1
# Gather the per-season outcome counts (computed above) and season totals into
# lookup tables, then derive the percentage lists and plot one bar chart per
# species/season instead of repeating the figure code eight times.
#seasonDogCounts = {
# 'Spring': ([SpringDogsAdopted, SpringDogsDied, SpringDogsTransfered, SpringDogsReturnedToOwners, SpringDogsEuthanized], numSpringDogs),
# 'Summer': ([SummerDogsAdopted, SummerDogsDied, SummerDogsTransfered, SummerDogsReturnedToOwners, SummerDogsEuthanized], numSummerDogs),
# 'Fall': ([FallDogsAdopted, FallDogsDied, FallDogsTransfered, FallDogsReturnedToOwners, FallDogsEuthanized], numFallDogs),
# 'Winter': ([WinterDogsAdopted, WinterDogsDied, WinterDogsTransfered, WinterDogsReturnedToOwners, WinterDogsEuthanized], numWinterDogs),
#}
#seasonCatCounts = {
# 'Spring': ([SpringCatsAdopted, SpringCatsDied, SpringCatsTransfered, SpringCatsReturnedToOwners, SpringCatsEuthanized], numSpringCats),
# 'Summer': ([SummerCatsAdopted, SummerCatsDied, SummerCatsTransfered, SummerCatsReturnedToOwners, SummerCatsEuthanized], numSummerCats),
# 'Fall': ([FallCatsAdopted, FallCatsDied, FallCatsTransfered, FallCatsReturnedToOwners, FallCatsEuthanized], numFallCats),
# 'Winter': ([WinterCatsAdopted, WinterCatsDied, WinterCatsTransfered, WinterCatsReturnedToOwners, WinterCatsEuthanized], numWinterCats),
#}
#def plot_outcomes(percentages, title):
#    fig, ax = plt.subplots()
#    ax.bar(np.arange(5), percentages)
#    ax.set_ylabel('Percentage')
#    ax.set_title(title)
#    ax.set_ylim([0, 100])
#    ax.set_xticks(np.arange(5) + 0.42)
#    ax.set_xticklabels(('Adopted', 'Died', 'Transfered', 'Returned', 'Euthanized'))
#    plt.show()
#for species, groups in (('Dog', seasonDogCounts), ('Cat', seasonCatCounts)):
#    for season, (counts, total) in groups.items():
#        percentages = [count / total * 100 for count in counts]
#        plot_outcomes(percentages, 'Percentage {0} {1} Outcomes'.format(season, species))
# Repeat the outcome analysis by time of day. A single pass over the data
# fills (species, time) count tables, replacing the thirty separate counters
# and six more copies of the plotting code used in the seasonal version above.
#times = ('Morning', 'Afternoon', 'Night')
#outcomes = ('Adoption', 'Died', 'Transfer', 'Return_to_owner', 'Euthanasia')
#timeCounts = {(species, t): [0] * 5 for species in ('Dog', 'Cat') for t in times}
#timeTotals = {(species, t): 0 for species in ('Dog', 'Cat') for t in times}
#for row in data:
#    species, t, outcome = row[5], row[2], row[3]
#    if species in ('Dog', 'Cat') and t in times:
#        timeTotals[(species, t)] += 1
#        if outcome in outcomes:
#            timeCounts[(species, t)][outcomes.index(outcome)] += 1
#for species in ('Dog', 'Cat'):
#    for t in times:
#        counts, total = timeCounts[(species, t)], timeTotals[(species, t)]
#        percentages = [count / total * 100 for count in counts]
#        plot_outcomes(percentages, 'Percentage {0} {1} Outcomes'.format(t, species))
from cep_price_console.utils.utils import is_path_exists_or_creatable, creation_date
from cep_price_console.db_management.server_utils import mysql_login_required
from cep_price_console.utils.log_utils import debug, CustomAdapter
from cep_price_console.utils.excel_utils import Workbook
import cep_price_console.db_management.server_utils as server_utils
from cep_price_console.utils import config
from sqlalchemy.schema import CreateSchema
from sqlalchemy.sql import text
# from sqlalchemy.ext.declarative import DeferredReflection
# noinspection PyUnresolvedReferences
from sqlalchemy import exc, and_, select, or_, func
import importlib
import logging
import datetime
import os
import csv
import textwrap
reflected = False
creation_module = None
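# get_creation_module lazily imports the generated ARW_PRF_Creation module.
# Any tables already registered on the shared MySQL metadata are removed
# first so the generated module can re-declare them without collisions.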
@debug(lvl=logging.DEBUG, prefix='')
def get_creation_module():
global creation_module
if creation_module is None:
for table in list(server_utils.mysql_base.metadata.tables.keys()):
server_utils.mysql_base.metadata.remove(server_utils.mysql_base.metadata.tables[table])
creation_module = importlib.import_module("cep_price_console.db_management.ARW_PRF_Creation")
return creation_module
else:
return creation_module
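# ArwPrfImporter drives the import of the ARW Primary Report File workbook:
# it validates each worksheet's layout (ws_format_check), builds table and
# field objects from the sheet, writes the generated creation/mapping
# modules, and refreshes the MySQL schemas.
# A minimal usage sketch (the workbook path is illustrative, not from this code):
#     importer = ArwPrfImporter("ARW_PRF.xlsx")
#     importer.recreate()          # regenerate modules, schemas, and tables
#     importer.scheduled_script()  # later: refresh current/archive data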
class ArwPrfImporter(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.DEBUG, prefix='')
@mysql_login_required
def __init__(self, relative_filename):
self.relative_filename = relative_filename
self.wb_cls = Workbook(relative_filename)
self.session = server_utils.mysql_session_maker()
@debug(lvl=logging.DEBUG)
def investigate_arw_prf_xl(self):
for sheet_name in self.wb_cls.ws_lst:
prf_obj = self.ws_format_check(sheet_name)
if prf_obj is not None:
self.field_instantiation(prf_obj)
self.wb_cls.wb.unload_sheet(sheet_name)
@debug(lvl=logging.DEBUG)
def ws_format_check(self, sheet_name):
# PrimaryReportFile.clear_dict()
formatting_error = False
tbl_init_dict = {}
self.wb_cls.ws_sel = sheet_name
for col in range(1, self.wb_cls.col_count + 1):
col_dict = dict(
arw_or_static=None,
table_name=None,
filepath_or_master_table_name=None,
)
# Table-Level loop
            # Row 1 of every worksheet should hold Y/S/N values signifying whether the
            # column should be considered for table import; only columns marked 'Y' or 'S' are used.
for row in range(1, 4):
cell_val = self.wb_cls.fetch_value(row, col).formatted_value
try:
cell_val = str(cell_val).strip()
except ValueError:
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Value not a string: {3}"
.format(sheet_name, col, str(row), cell_val))
else:
if row == 1:
if cell_val in ('Y', 'S', 'N', 'MySQL File?'):
col_dict['arw_or_static'] = cell_val
else:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, First row value not "
"'Y', 'S', 'N' or 'MySQL File?': {3}".format(sheet_name, col,
row, cell_val))
break
elif row == 2:
if self.wb_cls.fetch_value(1, col).formatted_value != 'S':
if cell_val.strip() != "N/A":
if cell_val[-4:].upper() == ".CSV":
fileroot = config.config["directory"]["arw_export_dir"]
filepath = os.path.join(fileroot, cell_val)
ArwPrfImporter.logger.log(logging.DEBUG, "filepath: {0}".format(filepath))
if not is_path_exists_or_creatable(filepath):
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Invalid "
"filepath: {3}".format(sheet_name, col, row,
cell_val))
break
else:
col_dict['filepath_or_master_table_name'] = filepath
else:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, "
"Second row value must be a filepath or "
"'N/A': {3}".format(sheet_name, col, row, cell_val))
break
elif cell_val.strip() == "N/A":
col_dict['filepath_or_master_table_name'] = cell_val
elif self.wb_cls.fetch_value(1, col).formatted_value == 'S':
col_dict['filepath_or_master_table_name'] = cell_val
elif row == 3:
# table_name = None
ArwPrfImporter.logger.log(logging.NOTSET,
"Sheet Name: {0}, Column: {1}, Row: {2}, "
"ARW Column List: {3}, Cell Value: {4}"
.format(sheet_name, col, row, arw_col_list.get(str(col)), cell_val))
if col <= 22:
if arw_col_list.get(str(col)) != cell_val:
formatting_error = True
ArwPrfImporter.logger.error("Sheet Name: {0}, Column: {1}, Row: {2}, Column Ordering "
"Error: {3}".format(sheet_name, col, row, cell_val))
break
elif arw_col_list.get(str(col)) == cell_val:
col_dict['table_name'] = cell_val
else:
col_dict['table_name'] = cell_val
if formatting_error:
break
# ArwPrfImporter.logger.log(logging.NOTSET, "Sheet Name: {0}, Column: {1}".format(sheet_name, col))
# for str_key in col_dict.keys():
# str_value = col_dict.get(str_key)
# ArwPrfImporter.logger.log(logging.DEBUG, "Key: {0}, Value: {1}".format(str_key, str_value))
if col > 22:
tbl_init_dict[str(col)] = col_dict
if not formatting_error:
prf_obj = PrimaryReportFile(self.session, sheet_name)
for col_key in sorted(tbl_init_dict.keys(), key=lambda x: int(x)):
col_value = tbl_init_dict.get(col_key)
ArwPrfImporter.logger.log(logging.NOTSET, "Key: {0}, Value: {1}".format(col_key, col_value.values()))
prf_obj.tbl_init_dict = tbl_init_dict
self.table_instantiation(prf_obj)
return prf_obj
else:
return None
# self.wb_cls.wb.unload_sheet(sheet_name)
@debug(lvl=logging.DEBUG)
def table_instantiation(self, prf_obj):
for col in sorted(prf_obj.tbl_init_dict.keys(), key=lambda x: int(x)):
col_dict = prf_obj.tbl_init_dict.get(col)
if col_dict.get('arw_or_static') == 'Y':
current_table = CurrentTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_01_current",
filepath=col_dict.get('filepath_or_master_table_name'))
prf_obj.current_tbl_dict[col] = current_table
archive_table = ArchiveTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_02_archive",
filepath=col_dict.get('filepath_or_master_table_name'))
prf_obj.archive_tbl_dict[col] = archive_table
elif col_dict.get('arw_or_static') == 'S':
static_table = StaticTable(
session=self.session,
prf_name=prf_obj.filename,
prf_col=int(col),
base_table_name=col_dict.get('table_name'),
table_name=col_dict.get('table_name') + "_01_static",
master_table_name=col_dict.get('filepath_or_master_table_name'))
prf_obj.static_tbl_dict[col] = static_table
@debug(lvl=logging.DEBUG)
def field_instantiation(self, prf_obj):
self.wb_cls.ws_sel = prf_obj.sheetname
col_num_list = list(prf_obj.current_tbl_dict.keys()) + list(prf_obj.archive_tbl_dict.keys()) + list(
prf_obj.static_tbl_dict.keys())
col_num_list = [int(x) for x in list(set(col_num_list))]
# print(col_num_list)
for row in range(4, self.wb_cls.row_count + 1):
try:
new_field = Field(
arw_name=self.wb_cls.fetch_value(row, "A").formatted_value,
logical_field=self.wb_cls.fetch_value(row, "B").formatted_value,
tag=self.wb_cls.fetch_value(row, "C").formatted_value,
length=self.wb_cls.fetch_value(row, "D").formatted_value,
nested=self.wb_cls.fetch_value(row, "E").formatted_value,
desc=self.wb_cls.fetch_value(row, "F").formatted_value,
column_name=self.wb_cls.fetch_value(row, "H").formatted_value,
data_type=self.wb_cls.fetch_value(row, "I").formatted_value,
fill=self.wb_cls.fetch_value(row, "J").formatted_value,
primary_key=self.wb_cls.fetch_value(row, "K").formatted_value,
nullable=self.wb_cls.fetch_value(row, "L").formatted_value,
unique=self.wb_cls.fetch_value(row, "M").formatted_value,
index=self.wb_cls.fetch_value(row, "N").formatted_value,
binary_col=self.wb_cls.fetch_value(row, "O").formatted_value,
auto_incremental=self.wb_cls.fetch_value(row, "P").formatted_value,
generated=self.wb_cls.fetch_value(row, "Q").formatted_value,
static_key=self.wb_cls.fetch_value(row, "R").formatted_value,
dflt_exp=self.wb_cls.fetch_value(row, "U").raw_raw_val,
                    notes=self.wb_cls.fetch_value(row, "V").formatted_value,  # column V holds Notes per arw_col_list
)
except ValueError as err:
if not err.args:
err.args = ('',)
err.args = ("Sheet Name: {0}, Row: {1}"
.format(prf_obj.sheetname,
row),
) + err.args
ArwPrfImporter.logger.error(err.args)
else:
for col in sorted(col_num_list):
try:
order = int(self.wb_cls.fetch_value(row, col).formatted_value)
except ValueError:
ArwPrfImporter.logger.log(
logging.DEBUG, "Value is not an integer. Field not appended to any dictionary.")
else:
current_tbl_obj = prf_obj.current_tbl_dict.get(str(col))
if current_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, current_tbl_obj.table_name))
current_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}. Current Table Dictionary. Get returned 'None'".format(col))
archive_tbl_obj = prf_obj.archive_tbl_dict.get(str(col))
if archive_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, archive_tbl_obj.table_name))
archive_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}. Archive Table Dictionary. Get returned 'None'".format(col))
static_tbl_obj = prf_obj.static_tbl_dict.get(str(col))
if static_tbl_obj is not None:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Column: {0}, Table: {1}, Value is an integer. Field appended to dictionary.".format(
col, static_tbl_obj.table_name))
static_tbl_obj.fields[str(order)] = new_field
else:
ArwPrfImporter.logger.log(
logging.DEBUG,
"Row: {1}, Column: {0}. Static Table Dictionary. Get returned 'None'".format(col, row))
tbl_obj_lst = \
list(prf_obj.current_tbl_dict.values()) + \
list(prf_obj.archive_tbl_dict.values()) + \
list(prf_obj.static_tbl_dict.values())
for tbl_obj in tbl_obj_lst:
tbl_obj.post_field_instantiation()
# self.wb_cls.wb.unload_sheet(prf_obj.sheetname)
@debug(lvl=logging.DEBUG)
def write_module_file(self, creation=False, mapping=False):
if bool(PrimaryReportFile.prf_dict.values()):
filename = None
if sum([creation, mapping]) != 1:
raise ValueError
elif creation:
filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Creation.py"
with filename.open("w") as module_file:
print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
print("from sqlalchemy import Column, Table, func", file=module_file)
print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
print("from sqlalchemy.types import Date, DateTime, Integer, Numeric, String, Time",
file=module_file)
print("from sqlalchemy.dialects.mysql import LONGTEXT", file=module_file)
print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
elif mapping:
filename = config.SOURCE_PATH / "cep_price_console" / "db_management" / "ARW_PRF_Mapping.py"
with filename.open("w") as module_file:
print("from sqlalchemy.ext.declarative import DeferredReflection", file=module_file)
print("from sqlalchemy import Table, func", file=module_file)
print("from sqlalchemy.sql import case, and_, or_, literal", file=module_file)
print("from sqlalchemy.ext.hybrid import hybrid_property", file=module_file)
print("import cep_price_console.db_management.server_utils as server_utils\n\n", file=module_file)
with filename.open("a") as module_file:
filename_statement = "Workbook Filename: {0}\n".format(self.wb_cls.xl_fullpath_pretty)
max_length = 110
fmt_string = "# " + "\n# ".join([filename_statement[i:i + max_length] for i in
range(0, len(filename_statement), max_length)])
print(fmt_string, file=module_file)
print("# Timestamp: {0}".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")),
file=module_file)
print("\n", file=module_file)
print("class InformReflection(DeferredReflection, server_utils.mysql_base):", file=module_file)
print(" __abstract__ = True\n\n", file=module_file)
for prf_obj in PrimaryReportFile.prf_dict.values():
ArwPrfImporter.logger.log(logging.NOTSET, "Primary Report File: {0}".
format(prf_obj.sheetname))
tbl_obj_lst = \
list(prf_obj.current_tbl_dict.values()) + \
list(prf_obj.archive_tbl_dict.values()) + \
list(prf_obj.static_tbl_dict.values())
for tbl_obj in sorted(tbl_obj_lst, key=lambda x: x.table_name):
ArwPrfImporter.logger.log(logging.NOTSET, "Tablename: {0}".format(tbl_obj.table_name))
if creation:
print(tbl_obj.creation_stmt, file=module_file)
elif mapping:
print(tbl_obj.mapping_stmt, file=module_file)
elif not bool(PrimaryReportFile.prf_dict.values()):
ArwPrfImporter.logger.error("Primary Report File list empty.")
self.investigate_arw_prf_xl()
self.write_module_file(creation, mapping)
@debug(lvl=logging.DEBUG)
def create_schemas(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.create_if_not_exists()
@debug(lvl=logging.DEBUG)
def drop_and_create_all_tables(self):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.drop_and_create_tables()
@debug(lvl=logging.DEBUG)
def scheduled_script(self):
if hasattr(self, 'session'):
if bool(PrimaryReportFile.prf_dict.values()):
for prf_obj in PrimaryReportFile.prf_dict.values():
prf_obj.update_schema()
schema_create_if_not_exists('pythontest')
self.fill_prod_uom()
elif not bool(PrimaryReportFile.prf_dict.values()):
ArwPrfImporter.logger.error("Primary Report File list empty.")
self.investigate_arw_prf_xl()
self.scheduled_script()
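    # fill_prod_uom resolves base units of measure: rows whose UOM_Factor_Desc
    # is "1" seed their own UOM as the base, then each pass of the loop copies
    # the base factor onto rows that reference an already-resolved UOM via
    # Of_UOM, multiplying the quantity up the chain. It assumes every UOM
    # chain eventually reaches a base row; otherwise the loop would not end.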
@debug(lvl=logging.DEBUG, prefix='')
def fill_prod_uom(self):
import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
base_uom_update = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc == "1"
).values(
Base_UOM_Factor=ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
Base_UOM_Qty=1
)
server_utils.mysql_engine.execute(base_uom_update)
self.session.commit()
# noinspection PyPep8,PyComparisonWithNone
no_base_uom = self.session.query(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.ID).filter(
and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))
while no_base_uom.count() > 0:
# noinspection PyPep8,PyComparisonWithNone
has_base_uom = \
select([ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty,
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty,
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Factor_Desc]) \
.where(and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.isnot(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.isnot(None))) \
.distinct() \
.alias("has_base_uom")
# for _ in server_utils.mysql_engine.execute(has_base_uom):
# ArwPrfImporter.logger.log(logging.DEBUG, _)
# noinspection PyPep8,PyComparisonWithNone
update_next_uom_level = ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.update().where(and_(
or_(
and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num.is_(None),
has_base_uom.c.Prod_Num.is_(None)),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Prod_Num == has_base_uom.c.Prod_Num),
or_(
and_(
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM.is_(None),
has_base_uom.c.UOM.is_(None)),
ARW_PRF_Mapping.prod_uom_v2_01_current.Of_UOM == has_base_uom.c.UOM),
and_(ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Factor.is_(None),
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.Base_UOM_Qty.is_(None)))) \
.values(Base_UOM_Factor=has_base_uom.c.Base_UOM_Factor,
Base_UOM_Qty=(has_base_uom.c.Base_UOM_Qty *
ARW_PRF_Mapping.prod_uom_v2_01_current.__table__.c.UOM_Qty))
server_utils.mysql_engine.execute(update_next_uom_level)
self.session.commit()
@debug(lvl=logging.DEBUG, prefix='')
def recreate(self):
if hasattr(self, 'session'):
self.write_module_file(creation=True)
get_creation_module()
self.create_schemas()
self.drop_and_create_all_tables()
self.write_mapping()
@debug(lvl=logging.DEBUG, prefix='')
def write_mapping(self):
if hasattr(self, 'session'):
self.write_module_file(mapping=True)
self.scheduled_script()
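# Expected row-3 headers for the first 22 worksheet columns; ws_format_check
# rejects any sheet whose header ordering deviates from this map.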
arw_col_list = {
"1": "Name",
"2": "Logical Field",
"3": "Tag",
"4": "Length",
"5": "Nested",
"6": "Description",
"7": "|",
"8": "Column Name",
"9": "Datatype",
"10": "Fill",
"11": "PK",
"12": "Nullable",
"13": "UQ",
"14": "IND",
"15": "B",
"16": "AI",
"17": "G",
"18": "SK",
"19": "Mapping",
"20": "Static Name",
"21": "Default/ Expression",
"22": "Notes"
}
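# PrimaryReportFile represents one worksheet, mapped to a MySQL schema of the
# same (lowercased) name. Instances register themselves in prf_dict and own
# the current/archive/static table dictionaries built during import.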
class PrimaryReportFile(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
prf_dict = {}
@debug(lvl=logging.DEBUG, prefix='Primary Report File Initiated')
def __init__(self,
session,
filename):
self.session = session
self.filename = filename.lower()
self.sheetname = filename
self.tbl_init_dict = {}
self.current_tbl_dict = {}
self.archive_tbl_dict = {}
self.static_tbl_dict = {}
PrimaryReportFile.prf_dict[self.sheetname] = self
# @classmethod
# def clear_dict(cls):
# cls.prf_dict = {}
@debug(lvl=logging.DEBUG, prefix='')
def exists(self):
try:
server_utils.mysql_engine.execute("SHOW CREATE SCHEMA `{0}`;".format(self.filename)).scalar()
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Exists: {0}".format(self.filename))
return True
except exc.DBAPIError:
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Does Not Exist: {0}".format(self.filename))
return False
@debug(lvl=logging.DEBUG, prefix='')
def create(self):
PrimaryReportFile.logger.log(logging.NOTSET, "Creating Schema: {0}".format(self.filename))
server_utils.mysql_engine.execute(CreateSchema(self.filename))
@debug(lvl=logging.DEBUG, prefix='')
def create_if_not_exists(self):
if not self.exists():
self.create()
@debug(lvl=logging.DEBUG, prefix='')
def drop_and_create_tables(self):
tbl_lst = \
list(self.current_tbl_dict.values()) + \
list(self.archive_tbl_dict.values()) + \
list(self.static_tbl_dict.values())
for tbl_obj in tbl_lst:
tbl_obj.drop_and_create_if_not_exists()
# ARW_PRF_Mapping.InformReflection.prepare(server_utils.mysql_engine)
@debug(lvl=logging.DEBUG, prefix='')
def update_schema(self):
for current_tbl_obj in self.current_tbl_dict.values():
self.session.commit()
current_tbl_obj.truncate()
current_tbl_obj.append()
for archive_tbl_obj in self.archive_tbl_dict.values():
create_date = datetime.datetime.strptime(creation_date(archive_tbl_obj.filepath), "%Y-%m-%d %H:%M:%S")
max_date_time = archive_tbl_obj.max_date_time()
if create_date != max_date_time:
archive_tbl_obj.append()
archive_tbl_obj.delete_sub_max_date_time()
# for static_tbl_obj in self.static_tbl_dict.values():
# pass
# append static
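# Field models one field-definition row of a worksheet. Every constructor
# argument is validated by the property setters below, and get_create_field()
# renders the field as source lines for the generated SQLAlchemy module.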
class Field(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
type_list = (
"BigInteger",
"Boolean",
"Date",
"DateTime",
"Enum",
"Float",
"Integer",
"Interval",
"LargeBinary",
"MatchType",
"Numeric",
"PickleType",
"SchemaType",
"SmallInteger",
"String",
"Text",
"Time",
"Unicode",
"UnicodeText",
"LONGTEXT"
)
@debug(lvl=logging.DEBUG, prefix='')
def __init__(self,
arw_name="",
logical_field="",
tag="",
length="",
nested="",
desc="",
column_name="",
data_type="N/A",
primary_key="",
nullable="",
unique="",
index="",
binary_col="",
fill="",
auto_incremental="",
dflt_exp="", # Don't need it
generated="", # Don't need it
static_key="", # Don't need it
default="", # Don't need it
notes=""):
self.arw_name = arw_name # ARW Name with spaces and such (Column A)
self.logical_field = logical_field # If this is true, don't look for this value in the .csv file (Column B)
self.tag = tag # ARW Tag (Column C)
self.length = length # ARW Length (Not the length associated with datatype) (Column D)
self.nested = nested # ARW value (Column E)
self.desc = desc # ARW Description of field (Column F)
# None of the above fields influence the field's status in the DB
self.column_name = column_name # My assigned name without spaces (check that this is true in setter)(Column H)
self.data_type = data_type # SQL Datatype (convert to SQL Alchemy Datatype) (Column I)
self.primary_key = primary_key # Is this a primary key? (Column K)
        self.nullable = nullable  # Is this field nullable? (Column L)
self.unique = unique # Is this a Unique Index? (Column M)
self.index = index # Is this an Index? (Column N)
self.binary_col = binary_col # Is this a Binary Column? (Column O)
self.fill = fill # Datatype length (Column J)
        self.auto_incremental = auto_incremental  # Is this field Auto-Incremental? (Column P)
        self.generated = generated  # Is this field generated? (Column Q)
        self.static_key = static_key  # Is this field a static key? (Column R)
        self.default = default  # Don't really know
        self.dflt_exp = dflt_exp  # What is the default expression for this field? (Only used if generated) (Column U)
        self.notes = notes  # Don't really know (Column V)
self.get_create_field()
# region arw_name ##########################################################################################s######
@property
@debug(lvl=logging.NOTSET)
def arw_name(self):
return self._arw_name
@arw_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def arw_name(self, value):
try:
str_val = str(value)
self._arw_name = str_val.strip()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("arw_name", value))
# endregion ########################################################################################################
# region logical_field ############################################################################################
@property
@debug(lvl=logging.NOTSET)
def logical_field(self):
return self._logical_field
@logical_field.setter
@debug(lvl=logging.NOTSET, prefix="")
def logical_field(self, value):
try:
str_val = str(value).upper().strip()
if str_val in ("Y", "N"):
self._logical_field = str_val.strip()
else:
raise ValueError("{0}.{1}: Value must be 'Y' or 'N': {2}".
format(self.arw_name, "logical_field", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "logical_field", value))
# endregion ########################################################################################################
# region tag ######################################################################################################
@property
@debug(lvl=logging.NOTSET)
def tag(self):
return self._tag
@tag.setter
@debug(lvl=logging.NOTSET, prefix="")
def tag(self, value):
try:
str_val = str(value)
self._tag = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "tag", value))
# endregion ########################################################################################################
# region length ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def length(self):
return self._length
@length.setter
@debug(lvl=logging.NOTSET, prefix="")
def length(self, value):
try:
int_val = int(value)
self._length = int_val
except ValueError:
try:
str_val = str(value)
if str_val.upper().strip() == "N/A":
self._length = None
else:
raise ValueError("{0}.{1}: Value is not 'N/A': {2}".format(self.arw_name, "length", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "length", value))
# endregion ########################################################################################################
# region nested ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nested(self):
return self._nested
@nested.setter
@debug(lvl=logging.NOTSET, prefix="")
def nested(self, value):
try:
str_val = str(value)
self._nested = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".format(self.arw_name, "nested", value))
# endregion ########################################################################################################
# region desc #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def desc(self):
return self._desc
@desc.setter
@debug(lvl=logging.NOTSET, prefix="")
def desc(self, value):
try:
            str_val = str(value).replace("'", '"').strip()
            str_val = ' '.join(str_val.splitlines())
            self._desc = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "desc", value))
# endregion ########################################################################################################
# region column_name ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def column_name(self):
return self._column_name
@column_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def column_name(self, value):
try:
str_val = str(value).strip()
if len(str_val) > 64:
raise Exception("{0}.{1}: String length greater than the 64 character limit: {2}"
.format(self.arw_name, "column_name", value))
scrubbed_val = str_val.replace("(", "").replace(")", "").replace("/", "").replace("-", "").replace("#", "")
if str_val == scrubbed_val:
try:
int(scrubbed_val[:1])
except ValueError:
self._column_name = scrubbed_val
else:
raise Exception("{0}.{1}: First character of value cannot be a number: {2}"
.format(self.arw_name, "column_name", value))
else:
raise Exception("{0}.{1}: Value has one of the following illegal characters: {{(, ), /, -, #}}: {2}"
.format(self.arw_name, "column_name", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "column_name", value))
# endregion ########################################################################################################
# region data_type ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def data_type(self):
return self._data_type
@data_type.setter
@debug(lvl=logging.NOTSET, prefix="")
def data_type(self, value):
try:
str_val = str(value)
if str_val.strip() in Field.type_list:
self._data_type = str_val.strip()
else:
raise ValueError("{0}.{1}: Value not in datatype list: {2}"
.format(self.arw_name, "data_type", value))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}"
.format(self.arw_name, "data_type", value))
# endregion ########################################################################################################
# region fill #####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def fill(self):
return self._fill
@fill.setter
@debug(lvl=logging.NOTSET, prefix="")
def fill(self, value):
if self.data_type in (
"BigInteger",
"Boolean",
"Date",
"DateTime",
"Integer",
"SmallInteger",
"Time",
"Text",
"LONGTEXT"
):
if value not in ("", None):
raise ValueError("{0}.{1}: Datatype does not allow for a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
self._fill = None
        # "Float" is intentionally excluded from this tuple: it takes a
        # precision fill and is handled by its own branch below.
        elif self.data_type in (
                "LargeBinary",
                "String",
                # "Text",
                "Unicode",
                "UnicodeText"
        ):
if value in ("", None):
raise ValueError("{0}.{1}: Datatype requires a fill: {2}"
.format(self.arw_name, "fill", self.data_type))
else:
try:
int_val = int(value)
if self.data_type == "String" and self.binary_col:
self._fill = "length={0}, collation='binary'".format(str(int_val))
else:
self._fill = "length={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Float":
try:
int_val = int(value)
self._fill = "precision={0}".format(str(int_val))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to an integer: {2}"
.format(self.arw_name, "fill", value))
elif self.data_type == "Numeric":
try:
str_val = str(value).strip()
pre_str, scale_str = str_val.split(",")
try:
pre_int = int(pre_str.strip())
scale_int = int(scale_str.strip())
self._fill = "precision={0}, scale={1}".format(str(pre_int), str(scale_int))
except ValueError:
raise ValueError("{0}.{1}: Error with precision or scale integer conversion: "
"precision={2}, scale={3}".
format(self.arw_name, "fill", pre_str, scale_str))
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "fill", value))
elif self.data_type in (
"Enum",
"Interval",
"MatchType",
"PickleType",
"SchemaType"
):
raise ValueError("{0}.{1}: What the fuck are you doing using this datatype?: {2}"
.format(self.arw_name, "fill", self.data_type))
# endregion ########################################################################################################
# region primary_key ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def primary_key(self):
return self._primary_key
@primary_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def primary_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "primary_key", value))
if str_val.strip().upper() == "X":
self._primary_key = True
elif str_val.strip().upper() == "":
self._primary_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "primary_key", value))
# endregion ########################################################################################################
# region nullable #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def nullable(self):
return self._nullable
@nullable.setter
@debug(lvl=logging.NOTSET, prefix="")
def nullable(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "nullable", value))
if str_val.strip().upper() == "X":
if not self.primary_key:
self._nullable = True
else:
raise ValueError("{0}.{1}: Primary key cannot be nullable: {2}".
format(self.arw_name, "nullable", value))
elif str_val.strip().upper() == "":
self._nullable = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "nullable", value))
# endregion ########################################################################################################
# region unique ###################################################################################################
@property
@debug(lvl=logging.NOTSET)
def unique(self):
return self._unique
@unique.setter
@debug(lvl=logging.NOTSET, prefix="")
def unique(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "unique", value))
if str_val.strip().upper() == "X":
self._unique = True
elif str_val.strip().upper() == "":
self._unique = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "unique", value))
# endregion ########################################################################################################
# region index ####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def index(self):
return self._index
@index.setter
@debug(lvl=logging.NOTSET, prefix="")
def index(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "index", value))
if str_val.strip().upper() == "X":
self._index = True
elif str_val.strip().upper() == "":
self._index = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "index", value))
# endregion ########################################################################################################
# region binary_col ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def binary_col(self):
return self._binary_col
@binary_col.setter
@debug(lvl=logging.NOTSET, prefix="")
def binary_col(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "binary_col", value))
if str_val.strip().upper() == "X":
if self.data_type in ("String", "Text"):
self._binary_col = True
else:
raise ValueError("{0}.{1}: Only string and text datatypes can be binary: {2}".
format(self.arw_name, "binary_col", self.data_type))
elif str_val.strip().upper() == "":
self._binary_col = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "binary_col", value))
# endregion ########################################################################################################
# region auto_incremental #########################################################################################
@property
@debug(lvl=logging.NOTSET)
def auto_incremental(self):
return self._auto_incremental
@auto_incremental.setter
@debug(lvl=logging.NOTSET, prefix="")
def auto_incremental(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "auto_incremental", value))
if str_val.strip().upper() == "X":
if self.index and self.data_type in (
"BigInteger",
"Boolean",
"Float",
"Integer",
"Numeric",
"SmallInteger"):
self._auto_incremental = True
else:
raise ValueError("{0}.{1}: Autoincremented columns must be indexed and numeric.".
format(self.arw_name, "auto_incremental"))
elif str_val.strip().upper() == "":
self._auto_incremental = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "auto_incremental", value))
# endregion ########################################################################################################
# region generated ################################################################################################
@property
@debug(lvl=logging.NOTSET)
def generated(self):
return self._generated
@generated.setter
@debug(lvl=logging.NOTSET, prefix="")
def generated(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "generated", value))
if str_val.strip().upper() == "X":
if not self.auto_incremental:
self._generated = True
else:
raise ValueError("{0}.{1}: Value cannot be generated and autoincremented: {2}".
format(self.arw_name, "generated", value))
elif str_val.strip().upper() == "":
self._generated = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "generated", value))
# endregion ########################################################################################################
# region static_key ###############################################################################################
@property
@debug(lvl=logging.NOTSET)
def static_key(self):
return self._static_key
@static_key.setter
@debug(lvl=logging.NOTSET, prefix="")
def static_key(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "static_key", value))
if str_val.strip().upper() == "X":
self._static_key = True
elif str_val.strip().upper() == "":
self._static_key = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "static_key", value))
# endregion ########################################################################################################
# region default ##################################################################################################
@property
@debug(lvl=logging.NOTSET)
def default(self):
return self._default
@default.setter
@debug(lvl=logging.NOTSET, prefix="")
def default(self, value):
if value is None:
str_val = ""
else:
try:
str_val = str(value)
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "default", value))
if str_val.strip().upper() == "X":
self._default = True
elif str_val.strip().upper() == "":
self._default = False
else:
raise ValueError("{0}.{1}: Value must be empty or 'X': {2}".
format(self.arw_name, "default", value))
# endregion ########################################################################################################
# region dflt_exp #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def dflt_exp(self):
return self._dflt_exp
@dflt_exp.setter
@debug(lvl=logging.NOTSET, prefix="")
def dflt_exp(self, value):
try:
str_val = str(value)
self._dflt_exp = str_val.strip()
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "dflt_exp", value))
# endregion ########################################################################################################
# region notes ####################################################################################################
@property
@debug(lvl=logging.NOTSET)
def notes(self):
return self._notes
@notes.setter
@debug(lvl=logging.NOTSET, prefix="")
def notes(self, value):
try:
str_val = str(value)
self._notes = str_val.strip().replace(",", '"')
except ValueError:
raise ValueError("{0}.{1}: Value cannot be converted to string: {2}".
format(self.arw_name, "notes", value))
# endregion ########################################################################################################
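    # get_create_field renders this field as a list of source-code lines: a
    # Column('name', Type(fill), ...) call for ordinary fields, or the raw
    # default-expression lines for generated fields.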
@debug(lvl=logging.NOTSET, prefix='')
def get_create_field(self):
code_line_list = []
offset = len("Column(")
code_line_list.append("Column('{column_name}',".format(column_name=self.column_name))
if self.fill not in ("", None):
code_line_list.append(
offset * " " + "{data_type}({fill}),".format(data_type=self.data_type, fill=self.fill))
else:
code_line_list.append(offset * " " + "{data_type},".format(data_type=self.data_type))
if self.primary_key:
code_line_list.append(offset * " " + "primary_key=True,")
if self.nullable:
code_line_list.append(offset * " " + "nullable=True,")
if self.index and self.unique:
code_line_list.append(offset * " " + "unique=True,")
code_line_list.append(offset * " " + "index=True,")
else:
if self.index and not self.unique:
code_line_list.append(offset * " " + "index=True,")
if self.unique and not self.index:
code_line_list.append(offset * " " + "unique=True,")
code_line_list.append(offset * " " + "index=True,")
if self.auto_incremental:
code_line_list.append(offset * " " + "autoincrement=True,")
if self.notes not in ("", None):
code_line_list.append(offset * " " + "doc='{notes}',".format(notes=self.notes))
if self.desc not in ("", None):
max_length = 79
fmt_string = textwrap.wrap(self.desc, max_length)
fmt_str_len = len(fmt_string)
for count, line in enumerate(fmt_string, 1):
if count == 1:
if count == fmt_str_len:
code_line_list.append(
offset * " " + "comment='{description}',".format(description=line.strip()))
else:
code_line_list.append(offset * " " + "comment='{description}'".format(description=line.strip()))
elif count == fmt_str_len:
code_line_list.append(offset * " " + " '{description}',".format(description=line.strip()))
else:
code_line_list.append(offset * " " + " '{description}'".format(description=line.strip()))
if not self.generated:
if self.dflt_exp not in (None, "", "None"):
if isinstance(self.dflt_exp, str):
code_line_list.append(offset * " " + "default='{dflt_exp}', ".format(dflt_exp=self.dflt_exp))
else:
Field.logger.log(logging.ERROR, "Figure out what to do with int/float generated columns: {0}"
.format(self.arw_name))
elif self.generated:
if self.dflt_exp in (None, ""):
Field.logger.log(logging.ERROR, "Generated without default expression: {0}".format(self.arw_name))
elif self.dflt_exp not in (None, ""):
code_line_list = []
for line in self.dflt_exp.splitlines():
code_line_list.append("{0}".format(line.replace(" ", " ")))
Field.logger.log(logging.NOTSET, "Code:")
for line in code_line_list:
Field.logger.log(logging.NOTSET, " {code_line}".format(code_line=line))
return code_line_list
final_code_list = []
code_list_len = len(code_line_list)
for line in code_line_list[0:code_list_len - 1]:
final_code_list.append(line)
        final_line = code_line_list[code_list_len - 1][:-1] + "),"
        final_code_list.append(final_line)
Field.logger.log(logging.NOTSET, "Code:")
for line in final_code_list:
Field.logger.log(logging.NOTSET, " {code_line}".format(code_line=line))
return final_code_list
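    # convert_csv_value coerces a raw CSV string into a Python value matching
    # this field's data_type; '' becomes None, and conversion failures are
    # logged and returned as descriptive error strings.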
@debug(lvl=logging.NOTSET, prefix='')
def convert_csv_value(self, csv_string):
formatted_value = "Unassigned Error"
if csv_string == '':
formatted_value = None
Field.logger.log(logging.NOTSET, "CSV String: {csv_string}, Formatted Value: {formatted_value}".
format(csv_string=csv_string, formatted_value=formatted_value))
else:
if self.data_type in ("Text", "String", "Unicode", "UnicodeText"):
try:
formatted_value = str(csv_string)
except ValueError:
formatted_value = "Error converting to string"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("BigInteger", "Integer", "SmallInteger"):
try:
formatted_value = int(csv_string)
except ValueError:
formatted_value = "Error converting to an integer"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("Numeric", "Float"):
try:
formatted_value = float(csv_string)
except ValueError:
formatted_value = "Error converting to a float"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
                            column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Boolean":
if csv_string.strip().upper() == "FALSE":
formatted_value = False
elif csv_string.strip().upper() == "TRUE":
formatted_value = True
else:
formatted_value = "Error converting to a boolean"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type in ("LargeBinary", "Enum", "Interval", "MatchType", "PickleType", "SchemaType"):
formatted_value = "Unmapped Datatype"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "DateTime":
try:
formatted_value = csv_string
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Date":
try:
formatted_value = datetime.datetime.strptime(csv_string, "%m/%d/%Y").date()
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
elif self.data_type == "Time":
try:
formatted_value = datetime.datetime.strptime(csv_string, "%I:%M%p").time()
except ValueError:
formatted_value = "Date Conversion Error"
Field.logger.log(
logging.ERROR,
"ARW Name: {arw_name}, Column Name: {column_name}, "
"CSV Value: {csv_string}, Datatype: {data_type}".format(
arw_name=self.arw_name,
column_name=self.column_name,
csv_string=csv_string,
data_type=self.data_type))
return formatted_value
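# ConsoleTable wraps one MySQL table: it holds Field objects keyed by column
# order and renders them into mapping/creation source code through the
# mapping_stmt and creation_stmt properties below.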
class ConsoleTable(object):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.NOTSET, prefix='Table Initiated')
def __init__(self,
session,
prf_name,
prf_col,
base_table_name,
table_name=None):
self.session = session
self.prf_name = prf_name
self.prf_col = prf_col
self.base_table_name = base_table_name
self.table_name = table_name
self._mapping_stmt = None
self._creation_stmt = None
self.fields = {}
# region base_table_name ##########################################################################################
@property
@debug(lvl=logging.NOTSET)
def base_table_name(self):
return self._base_table_name
@base_table_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def base_table_name(self, value):
try:
str_value = str(value).lower()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("base_table_name", value))
else:
self._base_table_name = str_value
# endregion ########################################################################################################
# region table_name ##########################################################################################
@property
@debug(lvl=logging.NOTSET)
def table_name(self):
return self._table_name
@table_name.setter
@debug(lvl=logging.NOTSET, prefix="")
def table_name(self, value):
try:
str_value = str(value).lower()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("table_name", value))
else:
self._table_name = str_value
self.map = None
self.create = None
# endregion ########################################################################################################
# region prf_name #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def prf_name(self):
return self._prf_name
@prf_name.setter
@debug(lvl=logging.NOTSET)
def prf_name(self, value):
try:
str_value = str(value).lower()
except ValueError:
raise ValueError("{0}: Value cannot be converted to string: {1}".format("prf_name", value))
else:
self._prf_name = str_value
# endregion ########################################################################################################
# region mapping_stmt ##########################################################################################
@property
@debug(lvl=logging.NOTSET)
def mapping_stmt(self):
return self._mapping_stmt
@mapping_stmt.setter
@debug(lvl=logging.NOTSET)
def mapping_stmt(self, _):
if bool(self.fields):
gen_field_lst = []
code = "# noinspection PyPep8Naming\n"
code += "class {table_name}({reflection}):\n" \
.format(table_name=self.table_name, reflection="InformReflection")
code += " " * 4 + "__table__ = Table('{table_name}', {base_name}.metadata,\n".format(
table_name=self.table_name,
base_name="server_utils.mysql_base"
)
for field_order in sorted(self.fields.keys(), key=lambda x: int(x)):
field_obj = self.fields.get(field_order)
if field_obj.generated and field_obj.dflt_exp not in (None, "", "None"):
gen_field_lst.append(field_obj)
code += " " * 22 + "schema='{schema_name}')\n".format(schema_name=self.prf_name)
if bool(gen_field_lst):
for field_obj in gen_field_lst:
code += "\n"
gen_code_lst = field_obj.get_create_field()
for line in gen_code_lst:
code += " " + line + "\n"
code += "\n"
self._mapping_stmt = code
elif not bool(self.fields):
raise NotImplementedError
# endregion ########################################################################################################
# region creation_stmt ##########################################################################################
@property
@debug(lvl=logging.NOTSET)
def creation_stmt(self):
return self._creation_stmt
@creation_stmt.setter
@debug(lvl=logging.NOTSET)
def creation_stmt(self, _):
if bool(self.fields):
gen_field_lst = []
offset = 22
code = "# noinspection PyPep8Naming\n"
code += "class {0}(server_utils.mysql_base):\n" \
.format(self.table_name)
code += " " * 4 + "__table__ = Table('{table_name}', {base_name}.metadata,\n".format(
table_name=self.table_name,
base_name="server_utils.mysql_base"
)
for field_order in sorted(self.fields.keys(), key=lambda x: int(x)):
field_obj = self.fields.get(field_order)
if field_obj.generated and field_obj.dflt_exp not in (None, "", "None"):
gen_field_lst.append(field_obj)
else:
code_lst = field_obj.get_create_field()
for line in code_lst:
code += " " * offset + line + "\n"
code += " " * offset + "schema='{schema_name}')\n".format(schema_name=self.prf_name)
if bool(gen_field_lst):
for field_obj in gen_field_lst:
code += "\n"
gen_code_lst = field_obj.get_create_field()
for line in gen_code_lst:
code += " " + line + "\n"
code += "\n"
self._creation_stmt = code
elif not bool(self.fields):
raise NotImplementedError
# endregion ########################################################################################################
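    # Hedged sketch of what creation_stmt emits (table and schema names are
    # illustrative, not from the source):
    #     # noinspection PyPep8Naming
    #     class some_table(server_utils.mysql_base):
    #         __table__ = Table('some_table', server_utils.mysql_base.metadata,
    #                           Column('ID',
    #                                  Integer,
    #                                  primary_key=True,),
    #                           schema='some_schema')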
# noinspection PyAttributeOutsideInit
@debug(lvl=logging.NOTSET, prefix='')
def post_field_instantiation(self):
self.mapping_stmt = None
self.creation_stmt = None
@debug(lvl=logging.DEBUG, prefix='')
def exists(self):
if not server_utils.mysql_engine.dialect.has_table(
server_utils.mysql_engine,
self.table_name,
schema=self.prf_name):
ConsoleTable.logger.log(
logging.NOTSET,
"Table does not exist: {0}.{1}".format(self.prf_name, self.table_name)
)
return False
else:
ConsoleTable.logger.log(logging.NOTSET, "Table exists: {0}.{1}".format(self.prf_name, self.table_name))
return True
@debug(lvl=logging.DEBUG, prefix='')
def create_a(self):
statement = "creation_module.{table_name}.__table__.create({engine_name})" \
.format(table_name=self.table_name,
engine_name="server_utils.mysql_engine")
ConsoleTable.logger.log(logging.NOTSET, "{schema_name}.{table_name} Create Statement: {statement}".
format(schema_name=self.prf_name,
table_name=self.table_name,
statement=statement))
exec(statement)
@debug(lvl=logging.DEBUG, prefix='')
def drop(self):
statement = "creation_module.{table_name}.__table__.drop({engine_name})" \
.format(table_name=self.table_name,
engine_name="server_utils.mysql_engine")
ConsoleTable.logger.log(logging.DEBUG, "{schema_name}.{table_name} Drop Statement: {statement}".
format(schema_name=self.prf_name,
table_name=self.table_name,
statement=statement))
exec(statement)
@debug(lvl=logging.DEBUG, prefix='')
def truncate(self):
statement = ("TRUNCATE `{schema_name}`.`{table_name}`;".format(schema_name=self.prf_name,
table_name=self.table_name))
ConsoleTable.logger.log(logging.NOTSET, "{schema_name}.{table_name} Truncate Statement: {statement}".
format(schema_name=self.prf_name,
table_name=self.table_name,
statement=statement))
server_utils.mysql_engine.execute(statement)
# statement = "creation_module.{table_name}.__table__.delete({engine_name})" \
# .format(table_name=self.table_name,
# engine_name="server_utils.mysql_engine")
# exec(statement)
@debug(lvl=logging.DEBUG, prefix='')
def drop_and_create_if_not_exists(self):
if not self.exists():
self.create_a()
else:
self.drop()
self.create_a()
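# Typical lifecycle for a ConsoleTable subclass (hedged sketch): check
# exists(), reset the structure with drop_and_create_if_not_exists(), run the
# subclass's append()/load, and truncate() on a refresh.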
class ARWTable(ConsoleTable):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.NOTSET, prefix='ARW Table Initiated')
def __init__(self,
session,
prf_name,
prf_col,
base_table_name,
table_name,
filepath=None,
):
super().__init__(
session=session,
prf_name=prf_name,
prf_col=prf_col,
base_table_name=base_table_name,
table_name=table_name
)
self.filepath = filepath
# region filepath #################################################################################################
@property
@debug(lvl=logging.NOTSET)
def filepath(self):
return self._filepath
@filepath.setter
@debug(lvl=logging.NOTSET)
def filepath(self, value):
try:
str_value = str(value)
except ValueError:
raise AttributeError("{0}: Value cannot be converted to string: {1}".format("filepath", value))
else:
fileroot = config.SOURCE_PATH / "cep_price_console" / "db_management"
# TODO: Production Change
filepath = str_value
# filepath = fileroot + str_value
if is_path_exists_or_creatable(filepath):
self._filepath = filepath
else:
raise AttributeError("{0}: Value is not a valid filepath: {1}".format("filepath", filepath))
# endregion ########################################################################################################
class StaticTable(ConsoleTable):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.NOTSET, prefix='Static table initiated')
def __init__(self,
session,
prf_name,
prf_col,
base_table_name,
table_name,
master_table_name=None):
super().__init__(
session=session,
prf_name=prf_name,
prf_col=prf_col,
base_table_name=base_table_name,
table_name=table_name
)
self.master_table_name = master_table_name
self._append_stmt = None
# region append_stmt ##############################################################################################
@property
@debug(lvl=logging.NOTSET)
def append_stmt(self):
return self._append_stmt
@append_stmt.setter
@debug(lvl=logging.DEBUG)
def append_stmt(self, value):
self._append_stmt = value
# endregion ########################################################################################################
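# CurrentTable.append() below bulk-loads the CSV with LOAD DATA LOCAL INFILE.
# Typed columns (dates, times, numerics) are first read into MySQL user
# variables (@var1, @var2, ...) and converted in the SET clause, because the
# STR_TO_DATE/NULLIF transformations cannot be expressed in the column list.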
class CurrentTable(ARWTable):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.DEBUG)
def append(self):
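        # Escape backslashes for the SQL string literal: stash already-doubled
        # ones behind a sentinel, double the remaining singles, then restore
        # the stash as doubles.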
filepath_useful = self.filepath.replace('\\\\', '+-+-+-+-').replace('\\', '\\\\').replace('+-+-+-+-', '\\\\')
order_mapping_dict = {}
temp_field_key_list = list(self.fields.keys())
with open(filepath_useful, newline='') as csvfile:
spamreader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
for row in spamreader:
for col_num, col_name in enumerate(row.keys()):
for field_key in sorted(temp_field_key_list, key=lambda x: int(x)):
field_obj = self.fields.get(field_key)
if field_obj.arw_name == col_name:
order_mapping_dict[col_num] = field_obj
temp_field_key_list.remove(field_key)
break
break
field_lst = []
set_lst = []
set_dict = {}
var_cntr_int = 0
for field_key in sorted(order_mapping_dict.keys(), key=lambda x: int(x)):
field_obj = order_mapping_dict.get(field_key)
if field_obj.logical_field == "N":
if field_obj.data_type in (
"BigInteger", "Date", "DateTime", "Float", "Integer", "Numeric", "SmallInteger", "Time"):
var_cntr_int += 1
var_str = "@var" + str(var_cntr_int)
set_dict[var_str] = field_obj
field_lst.append(" {0}".format(var_str))
elif not field_obj.generated:
# field_lst.append(" {0}".format(field_obj.column_name))
field_lst.append(" `{0}`".format(field_obj.column_name))
elif field_obj.logical_field == "Y":
                    # Logical fields (ID, Date_Time_Stamp, generated columns)
                    # are populated by the database, so nothing is added to
                    # the LOAD DATA column list here.
                    pass
for var_str, field_obj in set_dict.items():
if field_obj.data_type in ("Date", "DateTime", "Time"):
func_str = "STR_TO_DATE"
format_str = ""
aug_var_str = var_str
if field_obj.data_type == "DateTime":
format_str = "%Y-%m-%d %H.%i.%s"
elif field_obj.data_type == "Date":
format_str = "%m/%d/%Y"
elif field_obj.data_type == "Time":
format_str = "%h:%i %p"
aug_var_str = "CONCAT(SUBSTRING({0},1,5),' ',SUBSTRING({0},6))".format(var_str)
set_lst.append(" `{col_name}` = {func_str}({aug_var_str}, '{format_str}')".format(
col_name=field_obj.column_name,
func_str=func_str,
aug_var_str=aug_var_str,
format_str=format_str
))
elif field_obj.data_type in ("BigInteger", "Float", "Integer", "Numeric", "SmallInteger"):
func_str = "NULLIF"
aug_var_str = var_str
set_lst.append(" `{col_name}` = {func_str}({aug_var_str}, '')".format(
col_name=field_obj.column_name,
func_str=func_str,
aug_var_str=aug_var_str,
))
set_stmt = ""
        if set_dict:
set_stmt = "\n" + ',\n'.join(map(str, set_lst)) + ",\n "
file_creation_date = creation_date(self.filepath)
filepath_useful = self.filepath.replace('\\', '\\\\')
sql = text("""
LOAD DATA LOCAL INFILE '{filename}'
INTO TABLE `{schema_name}`.`{table_name}`
FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\\"'
LINES TERMINATED BY '\\r\\n'
IGNORE 1 LINES (
{field_lst}
)
SET{set_stmt} `Date_Time_Stamp` = '{file_creation_date}';""".format(
filename=filepath_useful,
schema_name=self.prf_name,
table_name=self.table_name,
field_lst=',\n'.join(map(str, field_lst)),
set_stmt=set_stmt,
file_creation_date=file_creation_date))
self.session.execute(sql)
self.session.commit()
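# Hedged example of the SQL that append() emits (schema, table, and column
# names are illustrative, not from the source):
#     LOAD DATA LOCAL INFILE 'C:\\data\\export.csv'
#     INTO TABLE `some_schema`.`some_table_01_current`
#     FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"'
#     LINES TERMINATED BY '\r\n'
#     IGNORE 1 LINES (
#         `Cust_Num`,
#         @var1
#     )
#     SET
#         `Order_Date` = STR_TO_DATE(@var1, '%m/%d/%Y'),
#         `Date_Time_Stamp` = '2019-01-01 00:00:00';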
class ArchiveTable(ARWTable):
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.NOTSET, prefix='')
def __init__(self,
session,
prf_name,
prf_col,
base_table_name,
table_name,
filepath=None,
):
super().__init__(
session=session,
prf_name=prf_name,
prf_col=prf_col,
base_table_name=base_table_name,
table_name=table_name,
filepath=filepath)
self._append_stmt = None
@debug(lvl=logging.DEBUG, prefix='')
def append(self):
# noinspection PyUnusedLocal, PyUnresolvedReferences
import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
query_stmt = "self.session.query(\n"
insert_stmt = "ARW_PRF_Mapping.{table_name}.__table__.insert().from_select([\n".format(
table_name=self.table_name
)
for col_num, field_obj in sorted(self.fields.items(), key=lambda x: int(x[0])):
if field_obj.column_name != 'ID':
if not field_obj.generated:
query_stmt += " ARW_PRF_Mapping.{base_table_name}_01_current.{field_name},\n".format(
base_table_name=self.base_table_name,
field_name=field_obj.column_name
)
insert_stmt += " ARW_PRF_Mapping.{table_name}.__table__.c.{field_name},\n".format(
table_name=self.table_name,
field_name=field_obj.column_name
)
query_stmt += ")"
        ArchiveTable.logger.log(logging.NOTSET, query_stmt)
# noinspection PyUnusedLocal
query_obj = eval(query_stmt)
insert_stmt += " ],\n query_obj\n)"
# noinspection PyUnusedLocal
insert_obj = eval(insert_stmt)
server_utils.mysql_engine.execute(insert_obj)
@debug(lvl=logging.DEBUG, prefix='')
def max_date_time(self):
# noinspection PyUnusedLocal, PyUnresolvedReferences
import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
statement = "self.session.query(func.max(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp)).scalar()" \
.format(table_name=self.table_name)
        evaluated_statement = None
        try:
            evaluated_statement = eval(statement)
        except Exception:
            # A bare `return` inside `finally` would silently swallow any
            # in-flight exception; log the failure and fall back to None.
            ArchiveTable.logger.log(
                logging.ERROR,
                "max_date_time failed for statement: {0}".format(statement))
        return evaluated_statement
@debug(lvl=logging.DEBUG, prefix='')
def delete_sub_max_date_time(self):
# noinspection PyUnresolvedReferences, PyUnusedLocal
import cep_price_console.db_management.ARW_PRF_Mapping as ARW_PRF_Mapping
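        # Dedup strategy: for each calendar date, keep only the rows carrying
        # that date's maximum Date_Time_Stamp; every other row ID is collected
        # via the outer join below and bulk-deleted.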
max_date_time_per_date_statement = \
"self.session.query(" \
"func.max(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp).label('DateTime')," \
"func.DATE(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp).label('Date'))." \
"group_by(func.DATE(ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp)).subquery()".format(
table_name=self.table_name)
# noinspection PyUnusedLocal
max_date_time_per_date = eval(max_date_time_per_date_statement)
id_not_max_date_time_per_date_statement = \
"self.session.query(ARW_PRF_Mapping.{table_name}.__table__.c.ID)." \
"outerjoin(max_date_time_per_date, " \
"ARW_PRF_Mapping.{table_name}.__table__.c.Date_Time_Stamp == max_date_time_per_date.c.DateTime)." \
"filter(max_date_time_per_date.c.DateTime.is_(None))".format(
table_name=self.table_name)
id_not_max_date_time_per_date = eval(id_not_max_date_time_per_date_statement)
# noinspection PyUnusedLocal
delete_list = [r[0] for r in id_not_max_date_time_per_date]
delete_not_max_id_statement = \
"ARW_PRF_Mapping.{table_name}.__table__.delete().where(" \
"ARW_PRF_Mapping.{table_name}.__table__.c.ID.in_(delete_list))".format(
table_name=self.table_name)
delete_not_max_id = eval(delete_not_max_id_statement)
server_utils.mysql_engine.execute(delete_not_max_id)
logger = CustomAdapter(logging.getLogger(str(__name__)), None)
@debug(lvl=logging.NOTSET, prefix='')
def reset_table(table_obj):
# noinspection PyUnusedLocal
drop_and_create = True
if drop_and_create:
if not server_utils.mysql_engine.dialect.has_table(server_utils.mysql_engine,
table_obj.__table__.name,
schema=table_obj.__table__.schema):
logger.log(logging.NOTSET, "Table does not exist: {schema_name}.{table_name}".format(
schema_name=table_obj.__table__.schema, table_name=table_obj.__table__.name))
table_obj.__table__.create(server_utils.mysql_engine)
else:
logger.log(logging.NOTSET, "Table exists: {schema_name}.{table_name}".format(
schema_name=table_obj.__table__.schema, table_name=table_obj.__table__.name))
table_obj.__table__.drop(server_utils.mysql_engine)
table_obj.__table__.create(server_utils.mysql_engine)
else:
statement = ("TRUNCATE `{schema_name}`.`{table_name}`;".format(schema_name=table_obj.__table__.schema,
table_name=table_obj.__table__.name))
logger.log(logging.NOTSET, "{schema_name}.{table_name} Truncate Statement: {statement}".
format(schema_name=table_obj.__table__.schema,
table_name=table_obj.__table__.name,
statement=statement))
server_utils.mysql_engine.execute(statement)
@debug(lvl=logging.DEBUG, prefix='')
def schema_exists(schema_name):
try:
server_utils.mysql_engine.execute("SHOW CREATE SCHEMA `{0}`;".format(schema_name)).scalar()
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Exists: {0}".format(schema_name))
return True
except exc.DBAPIError:
PrimaryReportFile.logger.log(logging.NOTSET, "Schema Does Not Exist: {0}".format(schema_name))
return False
@debug(lvl=logging.DEBUG, prefix='')
def schema_create(schema_name):
PrimaryReportFile.logger.log(logging.NOTSET, "Creating Schema: {0}".format(schema_name))
server_utils.mysql_engine.execute(CreateSchema(schema_name))
@debug(lvl=logging.DEBUG, prefix='')
def schema_create_if_not_exists(schema_name):
if not schema_exists(schema_name):
schema_create(schema_name)
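# Hedged usage sketch for the schema helpers above (the schema name is
# illustrative, not from the source):
#     schema_create_if_not_exists("example_schema")
# schema_exists() probes with SHOW CREATE SCHEMA and treats a DBAPI error as
# "absent"; schema_create() then issues CreateSchema through the engine.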
import rejig.pybytecode
from rejig.syntaxtree import *
def check(what_is, what_should_be):
    env = {}
if "\n" in what_is or " = " in what_is or "def " in what_is or "print(" in what_is:
exec("def f():\n " + "\n ".join(what_is.split("\n")), env)
else:
exec("def f():\n return " + what_is, env)
ast = rejig.pybytecode.ast(env["f"])
print(str(ast))
assert ast == what_should_be, "\nshould be: " + repr(what_should_be) + "\nyet it is: " + repr(ast)
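# check() compiles `what_is` into a throwaway function f (as a statement body
# when it contains a newline, an assignment, a def, or a print call; otherwise
# wrapped in a `return` expression), decompiles f's bytecode with
# rejig.pybytecode.ast, and asserts structural equality against the expected
# Suite.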
check('"hello"', Suite((Call('return', Const('hello')),)))
check('''.3''', Suite((Call('return', Const(.3)),)))
check('''-3''', Suite((Call('return', Const(-3)),)))
check('''--3''', Suite((Call('return', Const(--3)),)))
check('''+3''', Suite((Call('return', Const(+3)),)))
check('''++3''', Suite((Call('return', Const(++3)),)))
check('''+-3''', Suite((Call('return', Const(+-3)),)))
check('''3e1''', Suite((Call('return', Const(3e1)),)))
check('''-3e1''', Suite((Call('return', Const(-3e1)),)))
check('''+3e1''', Suite((Call('return', Const(+3e1)),)))
check('0x123', Suite((Call('return', Const(0x123)),)))
check('0o123', Suite((Call('return', Const(0o123)),)))
check('3+4j', Suite((Call('return', Const(3+4j)),)))
check('''[]''', Suite((Call('return', Call('list')),)))
check('''[3]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3,]''', Suite((Call('return', Call('list', Const(3))),)))
check('''[3, 4]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4,]''', Suite((Call('return', Call('list', Const(3), Const(4))),)))
check('''[3, 4, 5]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5))),)))
check('''[3, 4, 5, 6]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[3, 4, 5, 6,]''', Suite((Call('return', Call('list', Const(3), Const(4), Const(5), Const(6))),)))
check('''[[1], 2, 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5))),)))
check('''[[1, 2], 3, 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5))),)))
check('''[[1, 2, 3], 4, 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5))),)))
check('''[[1, 2, 3, 4], 5]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5))),)))
check('''[[1, 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1], 2, 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1)), Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2], 3, 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2)), Const(3), Const(4), Const(5)))),)))
check('''[[[1, 2, 3], 4, 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3)), Const(4), Const(5)))),)))
check('''[[[1, 2, 3, 4], 5]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4)), Const(5)))),)))
check('''[[[1, 2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Const(5))))),)))
check('''[1, 2, 3, 4, [5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5)))),)))
check('''[1, 2, 3, [4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5)))),)))
check('''[1, 2, [3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5)))),)))
check('''[1, [2, 3, 4, 5]]''', Suite((Call('return', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5)))),)))
check('''[[1, 2, 3, 4, [5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Const(4), Call('list', Const(5))))),)))
check('''[[1, 2, 3, [4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Const(3), Call('list', Const(4), Const(5))))),)))
check('''[[1, 2, [3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Const(2), Call('list', Const(3), Const(4), Const(5))))),)))
check('''[[1, [2, 3, 4, 5]]]''', Suite((Call('return', Call('list', Call('list', Const(1), Call('list', Const(2), Const(3), Const(4), Const(5))))),)))
check('''x = (None)''', Suite((Assign((Name('x'),), Const(None)), Call('return', Const(None)),)))
check('''x = (3, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = (3, 4, 5, 6, None)''', Suite((Assign((Name('x'),), Call('tuple', Const(3), Const(4), Const(5), Const(6), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, None), 2, 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, None), 3, 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, None), 4, 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, None), 5, None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, None), 2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(None)), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, None), 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(None)), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, None), 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(None)), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, None), 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(None)), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (((1, 2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, 4, (5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, 3, (4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, 2, (3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = (1, (2, 3, 4, 5, None), None)''', Suite((Assign((Name('x'),), Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, 4, (5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Const(4), Call('tuple', Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, 3, (4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Const(3), Call('tuple', Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, 2, (3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Const(2), Call('tuple', Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''x = ((1, (2, 3, 4, 5, None), None), None)''', Suite((Assign((Name('x'),), Call('tuple', Call('tuple', Const(1), Call('tuple', Const(2), Const(3), Const(4), Const(5), Const(None)), Const(None)), Const(None))), Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),))) # hey look: Python does dead code removal!
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''3
''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''
3''', Suite((Call('return', Const(None)),)))
check('''a''', Suite((Call('return', Name('a')),)))
check('''a.b''', Suite((Call('return', Call('.', Name('a'), 'b')),)))
check('''a.b.c''', Suite((Call('return', Call('.', Call('.', Name('a'), 'b'), 'c')),)))
check('''a.b.c.d''', Suite((Call('return', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd')),)))
check('''a.b.c.d.e''', Suite((Call('return', Call('.', Call('.', Call('.', Call('.', Name('a'), 'b'), 'c'), 'd'), 'e')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a[1][2]''', Suite((Call('return', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2))),)))
check('''a[1][2][3]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3))),)))
check('''a[1][2][3][4]''', Suite((Call('return', Call('[.]', Call('[.]', Call('[.]', Call('[.]', Name('a'), Const(1)), Const(2)), Const(3)), Const(4))),)))
check('''(9, None).stuff''', Suite((Call('return', Call('.', Call('tuple', Const(9), Const(None)), 'stuff')),)))
check('''((9, None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), 'stuff')),)))
check('''(((9, None), None), None).stuff''', Suite((Call('return', Call('.', Call('tuple', Call('tuple', Call('tuple', Const(9), Const(None)), Const(None)), Const(None)), 'stuff')),)))
check('''a[1]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
check('''a[1,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1))),)))
check('''a["hey",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'))),)))
check('''a[1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)))),)))
check('''a[1::,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)))),)))
check('''a[:1:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)))),)))
check('''a[::1,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)))),)))
check('''a[1:2:,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)))),)))
check('''a[:1:2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)))),)))
check('''a[1::2,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)))),)))
check('''a[1:2:3,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)))),)))
check('''a[1,5]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5))),)))
check('''a["hey",5,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5))),)))
check('''a[1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5))),)))
check('''a[1::,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5))),)))
check('''a[:1:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5))),)))
check('''a[::1,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5))),)))
check('''a[1:2:,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5))),)))
check('''a[:1:2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5))),)))
check('''a[1::2,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5))),)))
check('''a[1:2:3,5,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5))),)))
check('''a[1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b"]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a["hey","a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[::1,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(None), Const(1)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(None)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[:1:2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(None), Const(1), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1::2,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(None), Const(2)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1:2:3,"a":"b",]''', Suite((Call('return', Call('[.]', Name('a'), Call("slice", Const(1), Const(2), Const(3)), Call('slice', Const('a'), Const('b'), Const(None)))),)))
check('''a[1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const(1), Const(5), Const(6))),)))
check('''a["hey",5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Const('hey'), Const(5), Const(6))),)))
check('''a[1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[1::,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(None)), Const(5), Const(6))),)))
check('''a[:1:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(None)), Const(5), Const(6))),)))
check('''a[::1,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(None), Const(1)), Const(5), Const(6))),)))
check('''a[1:2:,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(None)), Const(5), Const(6))),)))
check('''a[:1:2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(None), Const(1), Const(2)), Const(5), Const(6))),)))
check('''a[1::2,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(None), Const(2)), Const(5), Const(6))),)))
check('''a[1:2:3,5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Const(2), Const(3)), Const(5), Const(6))),)))
check('''a[1:[2]:3,[],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Const(2)), Const(3)), Call('list'), Const(5), Const(6))),)))
check('''a[1:[[2]]:3,[[]],5,6,]''', Suite((Call('return', Call('[.]', Name('a'), Call('slice', Const(1), Call('list', Call('list', Const(2))), Const(3)), Call('list', Call('list')), Const(5), Const(6))),)))
check('''a[2].three''', Suite((Call('return', Call('.', Call('[.]', Name('a'), Const(2)), 'three')),)))
check('''a.three''', Suite((Call('return', Call('.', Name('a'), 'three')),)))
check('''a[2]''', Suite((Call('return', Call('[.]', Name('a'), Const(2))),)))
check('''a.three[2]''', Suite((Call('return', Call('[.]', Call('.', Name('a'), 'three'), Const(2))),)))
check('''x and y''', Suite((Call('return', Call('and', Name('x'), Name('y'))),)))
check('''x and y and z''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x and y and z and w''', Suite((Call('return', Call('and', Name('x'), Call('and', Name('y'), Call('and', Name('z'), Name('w'))))),)))
check('''not x''', Suite((Call('return', Call('not', Name('x'))),)))
check('''not x and y''', Suite((Call('return', Call('and', Call('not', Name('x')), Name('y'))),)))
check('''x or y''', Suite((Call('return', Call('or', Name('x'), Name('y'))),)))
check('''x or y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Name('z')))),)))
check('''x or y or z''', Suite((Call('return', Call('or', Name('x'), Call('or', Name('y'), Name('z')))),)))
check('''not x or y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Name('z')))),)))
check('''x or not y and z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''x or y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''not x or not y and z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Name('z')))),)))
check('''not x or y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Name('y'), Call('not', Name('z'))))),)))
check('''x or not y and not z''', Suite((Call('return', Call('or', Name('x'), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y and not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''x and y or z''', Suite((Call('return', Call('or', Call('and', Name('x'), Name('y')), Name('z'))),)))
check('''not x and y or z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Name('y')), Name('z'))),)))
check('''x and not y or z''', Suite((Call('return', Call('or', Call('and', Name('x'), Call('not', Name('y'))), Name('z'))),)))
check('''x and y or not z''', Suite((Call('return', Call('or', Call('and', Name('x'), Name('y')), Call('not', Name('z')))),)))
check('''not x and not y or z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Call('not', Name('y'))), Name('z'))),)))
check('''not x and y or not z''', Suite((Call('return', Call('or', Call('and', Call('not', Name('x')), Name('y')), Call('not', Name('z')))),)))
check('''x and not y or not z''', Suite((Call('return', Call('or', Call('and', Name('x'), Call('not', Name('y'))), Call('not', Name('z')))),)))
check('''x < y''', Suite((Call('return', Call('<', Name('x'), Name('y'))),)))
check('''x > y''', Suite((Call('return', Call('>', Name('x'), Name('y'))),)))
check('''x == y''', Suite((Call('return', Call('==', Name('x'), Name('y'))),)))
check('''x >= y''', Suite((Call('return', Call('>=', Name('x'), Name('y'))),)))
check('''x <= y''', Suite((Call('return', Call('<=', Name('x'), Name('y'))),)))
check('''x != y''', Suite((Call('return', Call('!=', Name('x'), Name('y'))),)))
check('''x in y''', Suite((Call('return', Call('in', Name('x'), Name('y'))),)))
check('''x not in y''', Suite((Call('return', Call('not-in', Name('x'), Name('y'))),)))
check('''1 < y < 2''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('<', Name('y'), Const(2)))),)))
check('''1 < y == 2''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('==', Name('y'), Const(2)))),)))
check('''(x, None) < y''', Suite((Call('return', Call('<', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) > y''', Suite((Call('return', Call('>', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) == y''', Suite((Call('return', Call('==', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) >= y''', Suite((Call('return', Call('>=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) <= y''', Suite((Call('return', Call('<=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) != y''', Suite((Call('return', Call('!=', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) in y''', Suite((Call('return', Call('in', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(x, None) not in y''', Suite((Call('return', Call('not-in', Call('tuple', Name('x'), Const(None)), Name('y'))),)))
check('''(1, None) < y < 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('<', Name('y'), Const(2)))),)))
check('''(1, None) < y == 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('==', Name('y'), Const(2)))),)))
check('''x < (y, None)''', Suite((Call('return', Call('<', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x > (y, None)''', Suite((Call('return', Call('>', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x == (y, None)''', Suite((Call('return', Call('==', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x >= (y, None)''', Suite((Call('return', Call('>=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x <= (y, None)''', Suite((Call('return', Call('<=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x != (y, None)''', Suite((Call('return', Call('!=', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x in (y, None)''', Suite((Call('return', Call('in', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''x not in (y, None)''', Suite((Call('return', Call('not-in', Name('x'), Call('tuple', Name('y'), Const(None)))),)))
check('''1 < (y, None) < 2''', Suite((Call('return', Call('and', Call('<', Const(1), Call('tuple', Name('y'), Const(None))), Call('<', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''1 < (y, None) == 2''', Suite((Call('return', Call('and', Call('<', Const(1), Call('tuple', Name('y'), Const(None))), Call('==', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''1 < y < (2, None)''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('<', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''1 < y == (2, None)''', Suite((Call('return', Call('and', Call('<', Const(1), Name('y')), Call('==', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''(x, None) < (y, None)''', Suite((Call('return', Call('<', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) > (y, None)''', Suite((Call('return', Call('>', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) == (y, None)''', Suite((Call('return', Call('==', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) >= (y, None)''', Suite((Call('return', Call('>=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) <= (y, None)''', Suite((Call('return', Call('<=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) != (y, None)''', Suite((Call('return', Call('!=', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) in (y, None)''', Suite((Call('return', Call('in', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(x, None) not in (y, None)''', Suite((Call('return', Call('not-in', Call('tuple', Name('x'), Const(None)), Call('tuple', Name('y'), Const(None)))),)))
check('''(1, None) < (y, None) < 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Call('tuple', Name('y'), Const(None))), Call('<', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''(1, None) < (y, None) == 2''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Call('tuple', Name('y'), Const(None))), Call('==', Call('tuple', Name('y'), Const(None)), Const(2)))),)))
check('''(1, None) < y < (2, None)''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('<', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''(1, None) < y == (2, None)''', Suite((Call('return', Call('and', Call('<', Call('tuple', Const(1), Const(None)), Name('y')), Call('==', Name('y'), Call('tuple', Const(2), Const(None))))),)))
check('''x + y''', Suite((Call('return', Call('+', Name('x'), Name('y'))),)))
check('''x + y + z''', Suite((Call('return', Call('+', Call('+', Name('x'), Name('y')), Name('z'))),)))
check('''x + y + z + w''', Suite((Call('return', Call('+', Call('+', Call('+', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x - y''', Suite((Call('return', Call('-', Name('x'), Name('y'))),)))
check('''x - y - z''', Suite((Call('return', Call('-', Call('-', Name('x'), Name('y')), Name('z'))),)))
check('''x - y - z - w''', Suite((Call('return', Call('-', Call('-', Call('-', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x - y + z - w''', Suite((Call('return', Call('-', Call('+', Call('-', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x * y''', Suite((Call('return', Call('*', Name('x'), Name('y'))),)))
check('''x * y * z''', Suite((Call('return', Call('*', Call('*', Name('x'), Name('y')), Name('z'))),)))
check('''x * y * z * w''', Suite((Call('return', Call('*', Call('*', Call('*', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x * y - z * w''', Suite((Call('return', Call('-', Call('*', Name('x'), Name('y')), Call('*', Name('z'), Name('w')))),)))
check('''x / y''', Suite((Call('return', Call('/', Name('x'), Name('y'))),)))
check('''x / y / z''', Suite((Call('return', Call('/', Call('/', Name('x'), Name('y')), Name('z'))),)))
check('''x / y / z / w''', Suite((Call('return', Call('/', Call('/', Call('/', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x / y * z / w''', Suite((Call('return', Call('/', Call('*', Call('/', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x % y''', Suite((Call('return', Call('%', Name('x'), Name('y'))),)))
check('''x % y % z''', Suite((Call('return', Call('%', Call('%', Name('x'), Name('y')), Name('z'))),)))
check('''x % y % z % w''', Suite((Call('return', Call('%', Call('%', Call('%', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x % y / z % w''', Suite((Call('return', Call('%', Call('/', Call('%', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x // y''', Suite((Call('return', Call('//', Name('x'), Name('y'))),)))
check('''x // y // z''', Suite((Call('return', Call('//', Call('//', Name('x'), Name('y')), Name('z'))),)))
check('''x // y // z // w''', Suite((Call('return', Call('//', Call('//', Call('//', Name('x'), Name('y')), Name('z')), Name('w'))),)))
check('''x // y % z // w''', Suite((Call('return', Call('//', Call('%', Call('//', Name('x'), Name('y')), Name('z')), Name('w'))),)))
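# Unary operators are tagged 'u+'/'u-' to keep them distinct from the
# binary forms; stacked signs nest, so --x -> Call('u-', Call('u-', Name('x'))).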
check('''+x''', Suite((Call('return', Call('u+', Name('x'))),)))
check('''-x''', Suite((Call('return', Call('u-', Name('x'))),)))
check('''++x''', Suite((Call('return', Call('u+', Call('u+', Name('x')))),)))
check('''+-x''', Suite((Call('return', Call('u+', Call('u-', Name('x')))),)))
check('''-+x''', Suite((Call('return', Call('u-', Call('u+', Name('x')))),)))
check('''--x''', Suite((Call('return', Call('u-', Call('u-', Name('x')))),)))
check('''+x + y''', Suite((Call('return', Call('+', Call('u+', Name('x')), Name('y'))),)))
check('''-x + y''', Suite((Call('return', Call('+', Call('u-', Name('x')), Name('y'))),)))
check('''++x + y''', Suite((Call('return', Call('+', Call('u+', Call('u+', Name('x'))), Name('y'))),)))
check('''+-x + y''', Suite((Call('return', Call('+', Call('u+', Call('u-', Name('x'))), Name('y'))),)))
check('''-+x + y''', Suite((Call('return', Call('+', Call('u-', Call('u+', Name('x'))), Name('y'))),)))
check('''--x + y''', Suite((Call('return', Call('+', Call('u-', Call('u-', Name('x'))), Name('y'))),)))
check('''x + +x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Name('x')))),)))
check('''x + -x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Name('x')))),)))
check('''x + ++x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Call('u+', Name('x'))))),)))
check('''x + +-x''', Suite((Call('return', Call('+', Name('x'), Call('u+', Call('u-', Name('x'))))),)))
check('''x + -+x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Call('u+', Name('x'))))),)))
check('''x + --x''', Suite((Call('return', Call('+', Name('x'), Call('u-', Call('u-', Name('x'))))),)))
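# Exponentiation is the one right-associative operator here:
#   x ** y ** z  ->  Call('**', x, Call('**', y, z))
# and it binds tighter than // (see the x ** y // z ** w case below).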
check('''x ** y''', Suite((Call('return', Call('**', Name('x'), Name('y'))),)))
check('''x ** y ** z''', Suite((Call('return', Call('**', Name('x'), Call('**', Name('y'), Name('z')))),)))
check('''x ** y ** z ** w''', Suite((Call('return', Call('**', Name('x'), Call('**', Name('y'), Call('**', Name('z'), Name('w'))))),)))
check('''x ** y // z ** w''', Suite((Call('return', Call('//', Call('**', Name('x'), Name('y')), Call('**', Name('z'), Name('w')))),)))
check('''x.y**2''', Suite((Call('return', Call('**', Call('.', Name('x'), 'y'), Const(2))),)))
check('f(None)', Suite((Call('return', Call(Name('f'), Const(None))),)))
check('f(x, None)', Suite((Call('return', Call(Name('f'), Name('x'), Const(None))),)))
check('f(x, y, None)', Suite((Call('return', Call(Name('f'), Name('x'), Name('y'), Const(None))),)))
check('f(x, y, z, None)', Suite((Call('return', Call(Name('f'), Name('x'), Name('y'), Name('z'), Const(None))),)))
check('f(x=1)', Suite((Call('return', CallKeyword(Name('f'), (), (('x', Const(1)),))),)))
check('f(x, y=1)', Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('y', Const(1)),))),)))
check('f(x, y, z=1)', Suite((Call('return', CallKeyword(Name('f'), (Name('x'), Name('y'),), (('z', Const(1)),))),)))
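# Statement sequences: a bare trailing expression is kept as a statement in
# the Suite, which still ends with an explicit return of None; tuple targets
# (with or without a trailing comma, including the one-element 'x, = 1')
# become Unpack nodes.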
check('x = 1; x', Suite((Assign((Name('x'),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x = 1; x;', Suite((Assign((Name('x'),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, = 1; x', Suite((Assign((Unpack((Name('x'),)),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, z = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'), Name('z'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
check('x, y, z, = 1; x', Suite((Assign((Unpack((Name('x'), Name('y'), Name('z'))),), Const(1)), Name('x'), Call('return', Const(None)),)))
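# Boolean logic: and/or chains nest to the right,
#   a and b and c  ->  Call('and', a, Call('and', b, c))
# while parenthesized groups such as (expr, None) lower to Call('tuple', ...).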
check("False", Suite((Call('return', Const(False)),)))
check("True", Suite((Call('return', Const(True)),)))
check("not x", Suite((
| 0 |
4d81f9fd95cb285139f7a2febae1ab8f6cf26d42
|
Python
|
Call('return', Call('not', Name('x'))),)))
check("not x and not y", Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y", Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("(not x or not y, None) and not z", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check("not x and (not y or not z, None)", Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check("not x(1, None)", Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check("not x(1, None) and not y(2, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("(not x(1, None) or not y(2, None), None) and not z(3, None)", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check("not x(1, None) and (not y(2, None) or not z(3, None), None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check("not x.a", Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check("not x.a and not y.b", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("(not x.a or not y.b, None) and not z.c", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check("not x.a and (not y.b or not z.c, None)", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
check("False", Suite((Call('return', Const(False)),)))
check("True", Suite((Call('return', Const(True)),)))
check("not x", Suite((Call('return', Call('not', Name('x'))),)))
check("not x and not y", Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x and not y and not z", Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y", Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("not x or not y or not z", Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check("(not x or not y, None) and not z", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check("not x and (not y or not z, None)", Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check("not x(1, None)", Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check("not x(1, None) and not y(2, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) and not y(2, None) and not z(3, None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("not x(1, None) or not y(2, None) or not z(3, None)", Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check("(not x(1, None) or not y(2, None), None) and not z(3, None)", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check("not x(1, None) and (not y(2, None) or not z(3, None), None)", Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check("not x.a", Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check("not x.a and not y.b", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a and not y.b and not z.c", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("not x.a or not y.b or not z.c", Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check("(not x.a or not y.b, None) and not z.c", Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check("not x.a and (not y.b or not z.c, None)", Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
check('''False''', Suite((Call('return', Const(False)),)))
check('''True''', Suite((Call('return', Const(True)),)))
check('''not x''', Suite((Call('return', Call('not', Name('x'))),)))
check('''not x and not y''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('not', Name('y')))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x and not y and not z''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('and', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('not', Name('y')))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''not x or not y or not z''', Suite((Call('return', Call('or', Call('not', Name('x')), Call('or', Call('not', Name('y')), Call('not', Name('z'))))),)))
check('''(not x or not y, None) and not z''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Name('x')), Call('not', Name('y'))), Const(None)), Call('not', Name('z')))),)))
check('''not x and (not y or not z, None)''', Suite((Call('return', Call('and', Call('not', Name('x')), Call('tuple', Call('or', Call('not', Name('y')), Call('not', Name('z'))), Const(None)))),)))
check('''not x(1, None)''', Suite((Call('return', Call('not', Call(Name('x'), Const(1), Const(None)))),)))
check('''not x(1, None) and not y(2, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) and not y(2, None) and not z(3, None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('and', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''not x(1, None) or not y(2, None) or not z(3, None)''', Suite((Call('return', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))))),)))
check('''(not x(1, None) or not y(2, None), None) and not z(3, None)''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call(Name('x'), Const(1), Const(None))), Call('not', Call(Name('y'), Const(2), Const(None)))), Const(None)), Call('not', Call(Name('z'), Const(3), Const(None))))),)))
check('''not x(1, None) and (not y(2, None) or not z(3, None), None)''', Suite((Call('return', Call('and', Call('not', Call(Name('x'), Const(1), Const(None))), Call('tuple', Call('or', Call('not', Call(Name('y'), Const(2), Const(None))), Call('not', Call(Name('z'), Const(3), Const(None)))), Const(None)))),)))
check('''not x.a''', Suite((Call('return', Call('not', Call('.', Name('x'), 'a'))),)))
check('''not x.a and not y.b''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a and not y.b and not z.c''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('and', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b')))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''not x.a or not y.b or not z.c''', Suite((Call('return', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))))),)))
check('''(not x.a or not y.b, None) and not z.c''', Suite((Call('return', Call('and', Call('tuple', Call('or', Call('not', Call('.', Name('x'), 'a')), Call('not', Call('.', Name('y'), 'b'))), Const(None)), Call('not', Call('.', Name('z'), 'c')))),)))
check('''not x.a and (not y.b or not z.c, None)''', Suite((Call('return', Call('and', Call('not', Call('.', Name('x'), 'a')), Call('tuple', Call('or', Call('not', Call('.', Name('y'), 'b')), Call('not', Call('.', Name('z'), 'c'))), Const(None)))),)))
check('''x != y''', Suite((Call('return', Call('!=', Name('x'), Name('y'))),)))
check('''x == y''', Suite((Call('return', Call('==', Name('x'), Name('y'))),)))
check('''x <= y''', Suite((Call('return', Call('<=', Name('x'), Name('y'))),)))
check('''x > y''', Suite((Call('return', Call('>', Name('x'), Name('y'))),)))
check('''x >= y''', Suite((Call('return', Call('>=', Name('x'), Name('y'))),)))
check('''x < y''', Suite((Call('return', Call('<', Name('x'), Name('y'))),)))
check('''x not in y''', Suite((Call('return', Call('not-in', Name('x'), Name('y'))),)))
check('''x in y''', Suite((Call('return', Call('in', Name('x'), Name('y'))),)))
check('''x == y and y == z''', Suite((Call('return', Call('and', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x == y and y == z''', Suite((Call('return', Call('and', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x == y or y == z''', Suite((Call('return', Call('or', Call('==', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''x != y or y != z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('!=', Name('y'), Name('z')))),)))
check('''x != y or y != z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('!=', Name('y'), Name('z')))),)))
check('''x != y or y == z''', Suite((Call('return', Call('or', Call('!=', Name('x'), Name('y')), Call('==', Name('y'), Name('z')))),)))
check('''a and b and c and d and e''', Suite((Call('return', Call('and', Name('a'), Call('and', Name('b'), Call('and', Name('c'), Call('and', Name('d'), Name('e')))))),)))
check('''a and b and c and d and e''', Suite((Call('return', Call('and', Name('a'), Call('and', Name('b'), Call('and', Name('c'), Call('and', Name('d'), Name('e')))))),)))
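# Function definitions lower to an Assign of a Def node whose body is a
# Suite; lambdas are anonymous Defs whose body is a single return.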
check("def g(x): return 3.14", Suite((Assign((Name('g'),), Def(('x',), (), Suite((Call('return', Const(3.14)),)))), Call('return', Const(None)),)))
check("""def g(x):
return 3.14""", Suite((Assign((Name('g'),), Def(('x',), (), Suite((Call('return', Const(3.14)),)))), Call('return', Const(None)),)))
check("def g(x, y): return x**2", Suite((Assign((Name('g'),), Def(('x', 'y'), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Call('return', Const(None)),)))
check("""def g(x, y):
return x**2""", Suite((Assign((Name('g'),), Def(('x', 'y'), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Call('return', Const(None)),)))
check("lambda: 3.14", Suite((Call('return', Def((), (), Suite((Call('return', Const(3.14)),)))),)))
check("lambda x: x**2", Suite((Call('return', Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))),)))
check("(lambda x: x**2, None)", Suite((Call('return', Call('tuple', Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))), Const(None))),)))
check("1 if x == 0 else 2", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(2)),))),)))
check("y = (1 if x == 0 else 2, None)", Suite((Assign((Name('y'),), Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(2)), Const(None))), Call('return', Const(None)),)))
check("1 if x == 0 else None", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check("(1 if x == 0 else 2, None)", Suite((Call('return', Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(2)), Const(None))),)))
check("(1 if x == 0 else None, None)", Suite((Call('return', Call('tuple', Call('?', Call('==', Name('x'), Const(0)), Const(1), Const(None)), Const(None))),)))
check("""if x == 0:
return 1""", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check("""if x == 0:
y = 1
return 1""", Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Call('return', Const(None)),))),)))
check('''if x == 0:
return 1
else:
return 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('return', Const(2)),))),)))
check('''if x == 0:
y = 1
return 1
else:
y = 2
return 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Assign((Name('y'),), Const(2)), Call("return", Const(2))))),)))
check('''if x == 0:
return 1
elif x == 1:
return 2
else:
return 3''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Call('return', Const(1)),)), Suite((Call('if', Call('==', Name('x'), Const(1)), Suite((Call('return', Const(2)),)), Suite((Call('return', Const(3)),))),))),)))
check('''if x == 0:
y = 1
return 1
elif x == 1:
y = 2
return 2
else:
y = 3
return 3''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Call('return', Const(1)),)), Suite((Call('if', Call('==', Name('x'), Const(1)), Suite((Assign((Name('y'),), Const(2)), Call('return', Const(2)),)), Suite((Assign((Name('y'),), Const(3)), Call("return", Const(3))))),))),)))
check('''if x == 0:
y = 1''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
z = 1''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Assign((Name('z'),), Const(1)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
else:
y = 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)),)), Suite((Assign((Name('y'),), Const(2)),))), Call('return', Const(None)),)))
check('''if x == 0:
y = 1
z = 1
else:
y = 2
z = 2''', Suite((Call('if', Call('==', Name('x'), Const(0)), Suite((Assign((Name('y'),), Const(1)), Assign((Name('z'),), Const(1)),)), Suite((Assign((Name('y'),), Const(2)), Assign((Name('z'),), Const(2)),))), Call('return', Const(None)),)))
# check("print(None)", Suite((Call('return', Call(Name('print'), Const(None))),)))
# check("print(1, None)", Suite((Call('return', Call(Name('print'), Const(1), Const(None))),)))
# check("print(1, 2, 3, None)", Suite((Call('return', Call(Name('print'), Const(1), Const(2), Const(3), Const(None))),)))
check("[]", Suite((Call('return', Call('list')),)))
check("[1]", Suite((Call('return', Call('list', Const(1))),)))
check("[1, 2]", Suite((Call('return', Call('list', Const(1), Const(2))),)))
check("[one]", Suite((Call('return', Call('list', Name('one'))),)))
check("[one, two]", Suite((Call('return', Call('list', Name('one'), Name('two'))),)))
check("['one']", Suite((Call('return', Call('list', Const('one'))),)))
check("['one', 'two']", Suite((Call('return', Call('list', Const('one'), Const('two'))),)))
check("set([])", Suite((Call('return', Call(Name('set'), Call('list'))),)))
check("set([1])", Suite((Call('return', Call(Name('set'), Call('list', Const(1)))),)))
check("set([1, 2])", Suite((Call('return', Call(Name('set'), Call('list', Const(1), Const(2)))),)))
check("set([one])", Suite((Call('return', Call(Name('set'), Call('list', Name('one')))),)))
check("set([one, two])", Suite((Call('return', Call(Name('set'), Call('list', Name('one'), Name('two')))),)))
check("set(['one'])", Suite((Call('return', Call(Name('set'), Call('list', Const('one')))),)))
check("set(['one', 'two'])", Suite((Call('return', Call(Name('set'), Call('list', Const('one'), Const('two')))),)))
check("{}", Suite((Call('return', Call('dict')),)))
check("{1}", Suite((Call('return', Call('set', Const(1))),)))
check("{1, 2}", Suite((Call('return', Call('set', Const(1), Const(2))),)))
check("{one}", Suite((Call('return', Call('set', Name('one'))),)))
check("{one, two}", Suite((Call('return', Call('set', Name('one'), Name('two'))),)))
check("{'one'}", Suite((Call('return', Call('set', Const('one'))),)))
check("{'one', 'two'}", Suite((Call('return', Call('set', Const('one'), Const('two'))),)))
check("{'x': 1}", Suite((Call('return', Call('dict', Const('x'), Const(1))),)))
check("{'x': 1, 'y': 2}", Suite((Call('return', Call('dict', Const('x'), Const(1), Const('y'), Const(2))),)))
check("{'x': 1, 'y': 2, 'z': 3}", Suite((Call('return', Call('dict', Const('x'), Const(1), Const('y'), Const(2), Const('z'), Const(3))),)))
check("{'x': one}", Suite((Call('return', Call('dict', Const('x'), Name('one'))),)))
check("{'x': one, 'y': two}", Suite((Call('return', Call('dict', Const('x'), Name('one'), Const('y'), Name('two'))),)))
check("{'x': one, 'y': two, 'z': three}", Suite((Call('return', Call('dict', Const('x'), Name('one'), Const('y'), Name('two'), Const('z'), Name('three'))),)))
check("{1: 1}", Suite((Call('return', Call('dict', Const(1), Const(1))),)))
check("{1: 1, 2: 2}", Suite((Call('return', Call('dict', Const(1), Const(1), Const(2), Const(2))),)))
check("{1: 1, 2: 2, 3: 3}", Suite((Call('return', Call('dict', Const(1), Const(1), Const(2), Const(2), Const(3), Const(3))),)))
check("{1: one}", Suite((Call('return', Call('dict', Const(1), Name('one'))),)))
check("{1: one, 2: two}", Suite((Call('return', Call('dict', Const(1), Name('one'), Const(2), Name('two'))),)))
check("{1: one, 2: two, 3: three}", Suite((Call('return', Call('dict', Const(1), Name('one'), Const(2), Name('two'), Const(3), Name('three'))),)))
check("{one: 1}", Suite((Call('return', Call('dict', Name('one'), Const(1))),)))
check("{one: 1, two: 2}", Suite((Call('return', Call('dict', Name('one'), Const(1), Name('two'), Const(2))),)))
check("{one: 1, two: 2, three: 3}", Suite((Call('return', Call('dict', Name('one'), Const(1), Name('two'), Const(2), Name('three'), Const(3))),)))
check("{one: one}", Suite((Call('return', Call('dict', Name('one'), Name('one'))),)))
check("{one: one, two: two}", Suite((Call('return', Call('dict', Name('one'), Name('one'), Name('two'), Name('two'))),)))
check("{one: one, two: two, three: three}", Suite((Call('return', Call('dict', Name('one'), Name('one'), Name('two'), Name('two'), Name('three'), Name('three'))),)))
check("[x**2 for x in something]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))))),)))
check("[x**2 for x in something if x > 0]", Suite((Call('return', Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),))))),)))
check("[y**2 for x in something for y in x]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something for y in x if x > 0]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something for y in x if y > 0]", Suite((Call('return', Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("[y**2 for x in something if x for y in x if x > 0]", Suite((Call('return', Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),))))),)))
check("f([x**2 for x in something], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f([x**2 for x in something if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something for y in x if y > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f([y**2 for x in something if x for y in x if x > 0], None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((x**2 for x in something), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f((x**2 for x in something if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('x',), (), Suite((Call('return', Call('**', Name('x'), Const(2))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Name('x'), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something for y in x if y > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Name('something'), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('y'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f((y**2 for x in something if x for y in x if x > 0), None)", Suite((Call('return', Call(Name('f'), Call(Call('.', Call(Call('.', Name('something'), 'filter'), Def(('x',), (), Suite((Call('return', Name('x')),)))), 'map'), Def(('x',), (), Suite((Call('return', Call(Call('.', Call(Call('.', Name('x'), 'filter'), Def(('y',), (), Suite((Call('return', Call('>', Name('x'), Const(0))),)))), 'map'), Def(('y',), (), Suite((Call('return', Call('**', Name('y'), Const(2))),))))),)))), Const(None))),)))
check("f(one=1)", Suite((Call('return', CallKeyword(Name('f'), (), (('one', Const(1)),))),)))
check("f(one=1, two=2)", Suite((Call('return', CallKeyword(Name('f'), (), (('one', Const(1)), ('two', Const(2))))),)))
check("f(x, one=1)", Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('one', Const(1)),))),)))
check("f(x, one=1, two=2)", Suite((Call('return', CallKeyword(Name('f'), (Name('x'),), (('one', Const(1)), ('two', Const(2))))),)))
check("x[..., :]", Suite((Call('return', Call('[.]', Name('x'), Const(Ellipsis), Call('slice', Const(None), Const(None), Const(None)))),)))
check('x = y = 1', Suite((Assign((Name('x'), Name('y')), Const(1)), Call('return', Const(None)),)))
check('x = y = z = 1', Suite((Assign((Name('x'), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y = 1', Suite((Assign((Unpack((Name('x'), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x, y = z = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y, z = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x.a = y = 1', Suite((Assign((Call('.', Name('x'), 'a'), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x.a = y = z = 1', Suite((Assign((Call('.', Name('x'), 'a'), Name('y'), Name('z'),), Const(1)), Call('return', Const(None)),)))
check('x.a, y = 1', Suite((Assign((Unpack((Call('.', Name('x'), 'a'), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x.a, y = z = 1', Suite((Assign((Unpack((Call('.', Name('x'), 'a'), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x.a = y, z = 1', Suite((Assign((Call('.', Name('x'), 'a'), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y.a = 1', Suite((Assign((Name('x'), Call('.', Name('y'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x = y.a = z = 1', Suite((Assign((Name('x'), Call('.', Name('y'), 'a'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y.a = 1', Suite((Assign((Unpack((Name('x'), Call('.', Name('y'), 'a'))),), Const(1)), Call('return', Const(None)),)))
check('x, y.a = z = 1', Suite((Assign((Unpack((Name('x'), Call('.', Name('y'), 'a'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y.a, z = 1', Suite((Assign((Name('x'), Unpack((Call('.', Name('y'), 'a'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z.a = 1', Suite((Assign((Name('x'), Name('y'), Call('.', Name('z'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x, y = z.a = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('.', Name('z'), 'a'),), Const(1)), Call('return', Const(None)),)))
check('x = y, z.a = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('.', Name('z'), 'a'))),), Const(1)), Call('return', Const(None)),)))
check('x[0] = y = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x[0] = y = z = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[0], y = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Const(0)), Name('y'),)),), Const(1)), Call('return', Const(None)),)))
check('x[0], y = z = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Const(0)), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[0] = y, z = 1', Suite((Assign((Call('[.]', Name('x'), Const(0)), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y[0] = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x = y[0] = z = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Const(0)), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y[0] = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Const(0)))),), Const(1)), Call('return', Const(None)),)))
check('x, y[0] = z = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Const(0)))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y[0], z = 1', Suite((Assign((Name('x'), Unpack((Call('[.]', Name('y'), Const(0)), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z[0] = 1', Suite((Assign((Name('x'), Name('y'), Call('[.]', Name('z'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x, y = z[0] = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('[.]', Name('z'), Const(0)),), Const(1)), Call('return', Const(None)),)))
check('x = y, z[0] = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('[.]', Name('z'), Const(0)))),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y = z = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[:, ...], y = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'))),), Const(1)), Call('return', Const(None)),)))
check('x[:, ...], y = z = 1', Suite((Assign((Unpack((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('y'))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x[:, ...] = y, z = 1', Suite((Assign((Call('[.]', Name('x'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Unpack((Name('y'), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...] = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...] = z = 1', Suite((Assign((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x, y[:, ...] = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))),), Const(1)), Call('return', Const(None)),)))
check('x, y[:, ...] = z = 1', Suite((Assign((Unpack((Name('x'), Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))), Name('z')), Const(1)), Call('return', Const(None)),)))
check('x = y[:, ...], z = 1', Suite((Assign((Name('x'), Unpack((Call('[.]', Name('y'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)), Name('z')))), Const(1)), Call('return', Const(None)),)))
check('x = y = z[:, ...] = 1', Suite((Assign((Name('x'), Name('y'), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x, y = z[:, ...] = 1', Suite((Assign((Unpack((Name('x'), Name('y'))), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)),), Const(1)), Call('return', Const(None)),)))
check('x = y, z[:, ...] = 1', Suite((Assign((Name('x'), Unpack((Name('y'), Call('[.]', Name('z'), Call('slice', Const(None), Const(None), Const(None)), Const(Ellipsis)))),), Const(1)), Call('return', Const(None)),)))
#
# PySNMP MIB module TPT-POLICY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TPT-POLICY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:26:23 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, SingleValueConstraint, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
Ipv6Address, = mibBuilder.importSymbols("IPV6-TC", "Ipv6Address")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Bits, Counter64, Integer32, iso, Counter32, NotificationType, TimeTicks, Unsigned32, ModuleIdentity, Gauge32, IpAddress, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Bits", "Counter64", "Integer32", "iso", "Counter32", "NotificationType", "TimeTicks", "Unsigned32", "ModuleIdentity", "Gauge32", "IpAddress", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "ObjectIdentity")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
tpt_tpa_objs, tpt_tpa_unkparams, tpt_tpa_eventsV2 = mibBuilder.importSymbols("TPT-TPAMIBS-MIB", "tpt-tpa-objs", "tpt-tpa-unkparams", "tpt-tpa-eventsV2")
tpt_policy = ModuleIdentity((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1)).setLabel("tpt-policy")
tpt_policy.setRevisions(('2016-05-25 18:54', '2015-06-19 18:30', '2015-05-28 13:30', '2014-12-15 11:42',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
    if mibBuilder.loadTexts: tpt_policy.setRevisionsDescriptions(('Updated copyright information. Minor MIB syntax fixes.', 'Added SSL inspection notification.', 'Added SSL inspected flag parameter to policy notifications.', 'Updated table sequence entries to be SMI compliant.',))
if mibBuilder.loadTexts: tpt_policy.setLastUpdated('201605251854Z')
if mibBuilder.loadTexts: tpt_policy.setOrganization('Trend Micro, Inc.')
if mibBuilder.loadTexts: tpt_policy.setContactInfo('www.trendmicro.com')
if mibBuilder.loadTexts: tpt_policy.setDescription("TPA policy counters. Copyright (C) 2016 Trend Micro Incorporated. All Rights Reserved. Trend Micro makes no warranty of any kind with regard to this material, including, but not limited to, the implied warranties of merchantability and fitness for a particular purpose. Trend Micro shall not be liable for errors contained herein or for incidental or consequential damages in connection with the furnishing, performance, or use of this material. This document contains proprietary information, which is protected by copyright. No part of this document may be photocopied, reproduced, or translated into another language without the prior written consent of Trend Micro. The information is provided 'as is' without warranty of any kind and is subject to change without notice. The only warranties for Trend Micro products and services are set forth in the express warranty statements accompanying such products and services. Nothing herein should be construed as constituting an additional warranty. Trend Micro shall not be liable for technical or editorial errors or omissions contained herein. TippingPoint(R), the TippingPoint logo, and Digital Vaccine(R) are registered trademarks of Trend Micro. All other company and product names may be trademarks of their respective holders. All rights reserved. This document contains confidential information, trade secrets or both, which are the property of Trend Micro. No part of this documentation may be reproduced in any form or by any means or used to make any derivative work (such as translation, transformation, or adaptation) without written permission from Trend Micro or one of its subsidiaries. All other company and product names may be trademarks of their respective holders. ")
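# Global packet counters (32-bit Counter32; 64-bit variants follow below).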
policyPacketsDropped = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsDropped.setStatus('current')
if mibBuilder.loadTexts: policyPacketsDropped.setDescription('The total number of packets discarded due to network congestion.')
policyPacketsBlocked = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsBlocked.setStatus('current')
if mibBuilder.loadTexts: policyPacketsBlocked.setDescription('The cumulative number of packets blocked because of policy actions.')
policyPacketsIncoming = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsIncoming.setStatus('current')
if mibBuilder.loadTexts: policyPacketsIncoming.setDescription('The total number of incoming packets.')
policyPacketsOutgoing = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsOutgoing.setStatus('current')
if mibBuilder.loadTexts: policyPacketsOutgoing.setDescription('The total number of outgoing packets.')
policyPacketsInvalid = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsInvalid.setStatus('current')
if mibBuilder.loadTexts: policyPacketsInvalid.setDescription('The total number of packets discarded because they were invalid.')
policyPacketsPermitted = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsPermitted.setStatus('current')
if mibBuilder.loadTexts: policyPacketsPermitted.setDescription('The cumulative number of packets permitted because of policy actions.')
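# 64-bit (Counter64) variants of the scalars above; their OIDs mirror the
# 32-bit arcs at an offset of +30 (e.g. dropped 1 -> 31, blocked 2 -> 32).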
policyPacketsDropped64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 31), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsDropped64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsDropped64.setDescription('The total number of packets discarded due to network congestion.')
policyPacketsBlocked64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 32), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsBlocked64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsBlocked64.setDescription('The cumulative number of packets blocked because of policy actions.')
policyPacketsIncoming64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 33), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsIncoming64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsIncoming64.setDescription('The total number of incoming packets.')
policyPacketsOutgoing64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 34), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsOutgoing64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsOutgoing64.setDescription('The total number of outgoing packets.')
policyPacketsInvalid64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 36), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsInvalid64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsInvalid64.setDescription('The total number of packets discarded because they were invalid.')
policyPacketsPermitted64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 37), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsPermitted64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsPermitted64.setDescription('The total number of packets permitted because of policy actions.')
policyPacketsRateLimited64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 38), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsRateLimited64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsRateLimited64.setDescription('The total number of packets discarded by rate limiting filters.')
policyPacketsTrusted64 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 39), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyPacketsTrusted64.setStatus('current')
if mibBuilder.loadTexts: policyPacketsTrusted64.setDescription('The cumulative number of packets trusted because of policy actions.')
policyDVObjs = ObjectIdentity((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 10))
if mibBuilder.loadTexts: policyDVObjs.setStatus('current')
if mibBuilder.loadTexts: policyDVObjs.setDescription('Sub-tree of Digital Vaccine information.')
policyDVVersion = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 10, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyDVVersion.setStatus('current')
if mibBuilder.loadTexts: policyDVVersion.setDescription('The version number of the Digital Vaccine on this machine.')
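# --- Illustrative sketch (not part of the generated MIB) --------------------
# A minimal example, assuming the compiled TPT-POLICY-MIB is on pysnmp's MIB
# search path, of reading the policyDVVersion scalar with pysnmp's high-level
# API. The hostname 'tpt-device.example.com' and the 'public' community string
# are placeholders, not values taken from this module.
def _example_get_dv_version():
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, getCmd)
    error_indication, error_status, error_index, var_binds = next(getCmd(
        SnmpEngine(),
        CommunityData('public'),
        UdpTransportTarget(('tpt-device.example.com', 161)),
        ContextData(),
        ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'policyDVVersion', 0))))
    if error_indication:
        raise RuntimeError(error_indication)
    # var_binds is a list of (name, value) pairs; the value is the DV version.
    return str(var_binds[0][1])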
policyCounterTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5), )
if mibBuilder.loadTexts: policyCounterTable.setStatus('obsolete')
if mibBuilder.loadTexts: policyCounterTable.setDescription('Table of per-policy counter values.')
policyCounterEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "policyGlobalID"))
if mibBuilder.loadTexts: policyCounterEntry.setStatus('obsolete')
if mibBuilder.loadTexts: policyCounterEntry.setDescription('An entry in the policy counter table. Rows cannot be created or deleted. ')
policyGlobalID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40)))
if mibBuilder.loadTexts: policyGlobalID.setStatus('obsolete')
if mibBuilder.loadTexts: policyGlobalID.setDescription('The global identifier of a policy.')
policyDescriptiveName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80)))
if mibBuilder.loadTexts: policyDescriptiveName.setStatus('obsolete')
if mibBuilder.loadTexts: policyDescriptiveName.setDescription('The human-readable name of a policy.')
policyCountBytes = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 3), Counter64())
if mibBuilder.loadTexts: policyCountBytes.setStatus('obsolete')
if mibBuilder.loadTexts: policyCountBytes.setDescription('The total number of bytes affected by the given policy.')
policyCountPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 4), Counter64())
if mibBuilder.loadTexts: policyCountPackets.setStatus('obsolete')
if mibBuilder.loadTexts: policyCountPackets.setDescription('The total number of packets affected by the given policy.')
policyCreationTime = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 5, 1, 5), Unsigned32())
if mibBuilder.loadTexts: policyCreationTime.setStatus('obsolete')
if mibBuilder.loadTexts: policyCreationTime.setDescription('The time the policy was pushed to NetPAL, in seconds since the epoch.')
class PolicyProtocol(TextualConvention, Integer32):
description = 'A selection from a set of networking protocols detected by a policy.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
namedValues = NamedValues(("icmp", 1), ("udp", 2), ("tcp", 3), ("other-ip", 4), ("arp", 5), ("other-eth", 6), ("icmpv6", 7), ("other-ipv6", 8))
class PolicyFrameSize(TextualConvention, Integer32):
description = 'A selection from a set of layer-2 frame size categories.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))
namedValues = NamedValues(("fs64B", 1), ("fs65to127B", 2), ("fs128to255B", 3), ("fs256to511B", 4), ("fs512to1023B", 5), ("fs1024toMaxB", 6), ("fsMaxto4095B", 7), ("fs4096to9216B", 8), ("fsUnder", 9), ("fsOver", 10), ("fs9217to16383", 11))
class PolicyFrameType(TextualConvention, Integer32):
description = 'A selection from a set of layer-2 frame types based on addressing and error status.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))
namedValues = NamedValues(("unicast", 1), ("broadcast", 2), ("multicast", 3), ("macControl", 4), ("fcsError", 5), ("alignError", 6), ("symbolError", 7))
class PolicySeverity(TextualConvention, Integer32):
description = 'A selection from a set of severity levels used by policies. Used for both statistical reports and notifications.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
namedValues = NamedValues(("warning", 1), ("minor", 2), ("major", 3), ("critical", 4))
topTenHitsByPolicyTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11), )
if mibBuilder.loadTexts: topTenHitsByPolicyTable.setStatus('current')
if mibBuilder.loadTexts: topTenHitsByPolicyTable.setDescription('Table of policies with the ten greatest hit counts.')
topTenHitsByPolicyEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "topTenRank"))
if mibBuilder.loadTexts: topTenHitsByPolicyEntry.setStatus('current')
if mibBuilder.loadTexts: topTenHitsByPolicyEntry.setDescription('An entry in the top ten policies table. Rows cannot be created or deleted. ')
topTenRank = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))).setMaxAccess("readonly")
if mibBuilder.loadTexts: topTenRank.setStatus('current')
if mibBuilder.loadTexts: topTenRank.setDescription('The numerical ranking 1 through 10 of a policy.')
policyHitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyHitCount.setStatus('current')
if mibBuilder.loadTexts: policyHitCount.setDescription('The count of alerts generated by a policy.')
policyName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyName.setStatus('current')
if mibBuilder.loadTexts: policyName.setDescription('The human-readable name of a policy.')
policyUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 11, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: policyUUID.setStatus('current')
if mibBuilder.loadTexts: policyUUID.setDescription('The global identifier of a policy.')
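# Illustrative sketch (not part of the generated MIB): walking the top-ten
# table by fetching two of its columns with nextCmd. The host and community
# are placeholders, and the compiled TPT-POLICY-MIB is assumed importable.
def _example_walk_top_ten():
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, ObjectType, ObjectIdentity, nextCmd)
    iterator = nextCmd(
        SnmpEngine(),
        CommunityData('public'),
        UdpTransportTarget(('tpt-device.example.com', 161)),
        ContextData(),
        ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'policyName')),
        ObjectType(ObjectIdentity('TPT-POLICY-MIB', 'policyHitCount')),
        lexicographicMode=False)  # stop at the end of this table
    for error_indication, error_status, error_index, var_binds in iterator:
        if error_indication or error_status:
            break
        # Each iteration yields one (policyName, policyHitCount) row per rank.
        print(', '.join(vb.prettyPrint() for vb in var_binds))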
alertsBySeverityTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12), )
if mibBuilder.loadTexts: alertsBySeverityTable.setStatus('current')
if mibBuilder.loadTexts: alertsBySeverityTable.setDescription('Table of alert counts of all policies at each severity level.')
alertsBySeverityEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertSeverity"))
if mibBuilder.loadTexts: alertsBySeverityEntry.setStatus('current')
if mibBuilder.loadTexts: alertsBySeverityEntry.setDescription('An entry in the alerts by severity table. Rows cannot be created or deleted. ')
alertSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1, 1), PolicySeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alertSeverity.setStatus('current')
if mibBuilder.loadTexts: alertSeverity.setDescription('The severity of a policy.')
severityAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 12, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: severityAlertCount.setStatus('current')
if mibBuilder.loadTexts: severityAlertCount.setDescription('The count of alerts generated by all policies of a given severity.')
alertsByProtocolTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13), )
if mibBuilder.loadTexts: alertsByProtocolTable.setStatus('current')
if mibBuilder.loadTexts: alertsByProtocolTable.setDescription('Table of alert counts of all policies at each protocol.')
alertsByProtocolEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertProtocol"))
if mibBuilder.loadTexts: alertsByProtocolEntry.setStatus('current')
if mibBuilder.loadTexts: alertsByProtocolEntry.setDescription('An entry in the alerts by protocol table. Rows cannot be created or deleted. ')
alertProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1, 1), PolicyProtocol()).setMaxAccess("readonly")
if mibBuilder.loadTexts: alertProtocol.setStatus('current')
if mibBuilder.loadTexts: alertProtocol.setDescription('The protocol of a policy.')
protocolAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 13, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: protocolAlertCount.setStatus('current')
if mibBuilder.loadTexts: protocolAlertCount.setDescription('The count of alerts generated by all policies of a given protocol.')
alertsByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14), )
if mibBuilder.loadTexts: alertsByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: alertsByZoneTable.setDescription('Table of alert counts of all policies for each zone.')
alertsByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "alertSlot"), (0, "TPT-POLICY-MIB", "alertPort"))
if mibBuilder.loadTexts: alertsByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: alertsByZoneEntry.setDescription('An entry in the alerts by zone table. Rows cannot be created or deleted. ')
alertSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 1), Unsigned32())
if mibBuilder.loadTexts: alertSlot.setStatus('obsolete')
if mibBuilder.loadTexts: alertSlot.setDescription('The slot portion identifying the zone affected by a policy.')
alertPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 2), Unsigned32())
if mibBuilder.loadTexts: alertPort.setStatus('obsolete')
if mibBuilder.loadTexts: alertPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneAlertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 14, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneAlertCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneAlertCount.setDescription('The count of alerts generated by all policies of a given zone.')
permitsByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15), )
if mibBuilder.loadTexts: permitsByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: permitsByZoneTable.setDescription('Table of permit counts of all policies for each zone.')
permitsByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "permitSlot"), (0, "TPT-POLICY-MIB", "permitPort"))
if mibBuilder.loadTexts: permitsByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: permitsByZoneEntry.setDescription('An entry in the permits by zone table. Rows cannot be created or deleted. ')
permitSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 1), Unsigned32())
if mibBuilder.loadTexts: permitSlot.setStatus('obsolete')
if mibBuilder.loadTexts: permitSlot.setDescription('The slot portion identifying the zone affected by a policy.')
permitPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 2), Unsigned32())
if mibBuilder.loadTexts: permitPort.setStatus('obsolete')
if mibBuilder.loadTexts: permitPort.setDescription('The port portion identifying the zone affected by a policy.')
zonePermitCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 15, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zonePermitCount.setStatus('obsolete')
if mibBuilder.loadTexts: zonePermitCount.setDescription('The count of permits generated by all policies of a given zone.')
blocksByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16), )
if mibBuilder.loadTexts: blocksByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: blocksByZoneTable.setDescription('Table of block counts of all policies for each zone.')
blocksByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "blockSlot"), (0, "TPT-POLICY-MIB", "blockPort"))
if mibBuilder.loadTexts: blocksByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: blocksByZoneEntry.setDescription('An entry in the blocks by zone table. Rows cannot be created or deleted. ')
blockSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 1), Unsigned32())
if mibBuilder.loadTexts: blockSlot.setStatus('obsolete')
if mibBuilder.loadTexts: blockSlot.setDescription('The slot portion identifying the zone affected by a policy.')
blockPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 2), Unsigned32())
if mibBuilder.loadTexts: blockPort.setStatus('obsolete')
if mibBuilder.loadTexts: blockPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneBlockCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 16, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneBlockCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneBlockCount.setDescription('The count of blocks generated by all policies of a given zone.')
p2psByZoneTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17), )
if mibBuilder.loadTexts: p2psByZoneTable.setStatus('obsolete')
if mibBuilder.loadTexts: p2psByZoneTable.setDescription('Table of p2p counts of all policies for each zone.')
p2psByZoneEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "p2pSlot"), (0, "TPT-POLICY-MIB", "p2pPort"))
if mibBuilder.loadTexts: p2psByZoneEntry.setStatus('obsolete')
if mibBuilder.loadTexts: p2psByZoneEntry.setDescription('An entry in the p2ps by zone table. Rows cannot be created or deleted. ')
p2pSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 1), Unsigned32())
if mibBuilder.loadTexts: p2pSlot.setStatus('obsolete')
if mibBuilder.loadTexts: p2pSlot.setDescription('The slot portion identifying the zone affected by a policy.')
p2pPort = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 2), Unsigned32())
if mibBuilder.loadTexts: p2pPort.setStatus('obsolete')
if mibBuilder.loadTexts: p2pPort.setDescription('The port portion identifying the zone affected by a policy.')
zoneP2pCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 17, 1, 3), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: zoneP2pCount.setStatus('obsolete')
if mibBuilder.loadTexts: zoneP2pCount.setDescription('The count of p2ps generated by all policies of a given zone.')
framesBySizeTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18), )
if mibBuilder.loadTexts: framesBySizeTable.setStatus('current')
if mibBuilder.loadTexts: framesBySizeTable.setDescription('Table of frame counts received in each size category.')
framesBySizeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "frameSize"))
if mibBuilder.loadTexts: framesBySizeEntry.setStatus('current')
if mibBuilder.loadTexts: framesBySizeEntry.setDescription('An entry in the frames by size table. Rows cannot be created or deleted. ')
frameSize = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1, 1), PolicyFrameSize()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frameSize.setStatus('current')
if mibBuilder.loadTexts: frameSize.setDescription('The size category of a frame.')
sizeFrameCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 18, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: sizeFrameCount.setStatus('current')
if mibBuilder.loadTexts: sizeFrameCount.setDescription('The count of frames received in a given size category.')
framesByTypeTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19), )
if mibBuilder.loadTexts: framesByTypeTable.setStatus('current')
if mibBuilder.loadTexts: framesByTypeTable.setDescription('Table of frame counts received in each type classification.')
framesByTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "frameType"))
if mibBuilder.loadTexts: framesByTypeEntry.setStatus('current')
if mibBuilder.loadTexts: framesByTypeEntry.setDescription('An entry in the frames by type table. Rows cannot be created or deleted. ')
frameType = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1, 1), PolicyFrameType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: frameType.setStatus('current')
if mibBuilder.loadTexts: frameType.setDescription('The type classification (e.g., unicast, broadcast, FCS error) of a frame.')
typeFrameCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 19, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: typeFrameCount.setStatus('current')
if mibBuilder.loadTexts: typeFrameCount.setDescription('The count of frames received in a given type classification.')
packetsByProtocolTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20), )
if mibBuilder.loadTexts: packetsByProtocolTable.setStatus('current')
if mibBuilder.loadTexts: packetsByProtocolTable.setDescription('Table of packet counts received for each protocol.')
packetsByProtocolEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "packetProtocol"))
if mibBuilder.loadTexts: packetsByProtocolEntry.setStatus('current')
if mibBuilder.loadTexts: packetsByProtocolEntry.setDescription('An entry in the packets by protocol table. Rows cannot be created or deleted. ')
packetProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1, 1), PolicyProtocol()).setMaxAccess("readonly")
if mibBuilder.loadTexts: packetProtocol.setStatus('current')
if mibBuilder.loadTexts: packetProtocol.setDescription('The protocol of a packet.')
protocolPacketCount = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 20, 1, 2), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: protocolPacketCount.setStatus('current')
if mibBuilder.loadTexts: protocolPacketCount.setDescription('The count of packets received for a given protocol.')
portStatsTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23), )
if mibBuilder.loadTexts: portStatsTable.setStatus('current')
if mibBuilder.loadTexts: portStatsTable.setDescription('Table of statistics for each physical port.')
portStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "portNumber"))
if mibBuilder.loadTexts: portStatsEntry.setStatus('current')
if mibBuilder.loadTexts: portStatsEntry.setDescription('An entry in the port statistics table. Rows cannot be created or deleted. ')
portNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 1), Unsigned32())
if mibBuilder.loadTexts: portNumber.setStatus('current')
if mibBuilder.loadTexts: portNumber.setDescription('The numeric index of a port.')
portName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: portName.setStatus('current')
if mibBuilder.loadTexts: portName.setDescription('The name of a port.')
portVlanTranslations = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 23, 1, 3), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: portVlanTranslations.setStatus('current')
if mibBuilder.loadTexts: portVlanTranslations.setDescription('Number of packets leaving this egress port whose VLAN IDs were translated.')
policyByNumberTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21), )
if mibBuilder.loadTexts: policyByNumberTable.setStatus('current')
if mibBuilder.loadTexts: policyByNumberTable.setDescription('Table of the name and description of each policy, indexed by policy number.')
policyByNumberEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "policyNumber"))
if mibBuilder.loadTexts: policyByNumberEntry.setStatus('current')
if mibBuilder.loadTexts: policyByNumberEntry.setDescription('An entry in the policy by number table. Rows cannot be created or deleted. ')
policyNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 1), Unsigned32())
if mibBuilder.loadTexts: policyNumber.setStatus('current')
if mibBuilder.loadTexts: policyNumber.setDescription('The number of a policy.')
numberName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 120))).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberName.setStatus('current')
if mibBuilder.loadTexts: numberName.setDescription('The name of a policy.')
numberDesc = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 21, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 3000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: numberDesc.setStatus('current')
if mibBuilder.loadTexts: numberDesc.setDescription('The description of a policy.')
securityZonePairTable = MibTable((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22), )
if mibBuilder.loadTexts: securityZonePairTable.setStatus('current')
if mibBuilder.loadTexts: securityZonePairTable.setDescription('Table of information and statistics for each security zone pair.')
securityZonePairEntry = MibTableRow((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1), ).setIndexNames((0, "TPT-POLICY-MIB", "szpUUID"))
if mibBuilder.loadTexts: securityZonePairEntry.setStatus('current')
if mibBuilder.loadTexts: securityZonePairEntry.setDescription('An entry in the security zone pair table. Rows cannot be created or deleted. ')
szpName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpName.setStatus('current')
if mibBuilder.loadTexts: szpName.setDescription('The name of a security zone pair.')
szpInZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpInZoneName.setStatus('current')
if mibBuilder.loadTexts: szpInZoneName.setDescription('The name of the input security zone of a security zone pair.')
szpOutZoneName = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpOutZoneName.setStatus('current')
if mibBuilder.loadTexts: szpOutZoneName.setDescription('The name of the output security zone of a security zone pair.')
szpUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpUUID.setStatus('current')
if mibBuilder.loadTexts: szpUUID.setDescription('The UUID of a security zone pair.')
szpInZoneUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpInZoneUUID.setStatus('current')
if mibBuilder.loadTexts: szpInZoneUUID.setDescription('The UUID of the input security zone of a security zone pair.')
szpOutZoneUUID = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpOutZoneUUID.setStatus('current')
if mibBuilder.loadTexts: szpOutZoneUUID.setDescription('The UUID of the output security zone of a security zone pair.')
szpInPackets = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 7), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpInPackets.setStatus('current')
if mibBuilder.loadTexts: szpInPackets.setDescription('The number of packets received on this security zone pair.')
szpInOctets = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 8), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpInOctets.setStatus('current')
if mibBuilder.loadTexts: szpInOctets.setDescription('The number of bytes received on this security zone pair.')
szpAlerts = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 9), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpAlerts.setStatus('current')
if mibBuilder.loadTexts: szpAlerts.setDescription('The number of alerts (blocks + permits) on this security zone pair.')
szpBlocks = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 10), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpBlocks.setStatus('current')
if mibBuilder.loadTexts: szpBlocks.setDescription('The number of blocks generated on this security zone pair.')
szpPermits = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 11), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpPermits.setStatus('current')
if mibBuilder.loadTexts: szpPermits.setDescription('The number of permits generated on this security zone pair.')
szpPrecedence = MibTableColumn((1, 3, 6, 1, 4, 1, 10734, 3, 3, 2, 1, 22, 1, 12), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: szpPrecedence.setStatus('current')
if mibBuilder.loadTexts: szpPrecedence.setDescription('The precedence of a security zone pair.')
class PolicyAction(TextualConvention, Integer32):
description = 'A selection between three fundamental actions of a policy: blocking the offending packets, detecting them but allowing them through, or rate-limiting them.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("deny", 1), ("allow", 2), ("ratelimit", 3))
class PolicyComponent(TextualConvention, Integer32):
description = 'A selection from among the components of a policy, corresponding to which log file is used to track the associated information.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 7, 8, 9))
namedValues = NamedValues(("invalid", 0), ("deny", 1), ("allow", 2), ("alert", 7), ("block", 8), ("peer", 9))
class SslInspectedFlag(TextualConvention, Integer32):
description = 'A flag indicating if an action was taken on an inspected SSL data stream.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("yes", 1), ("no", 2))
tptPolicyNotifyDeviceID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 11), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyDeviceID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyDeviceID.setDescription('The unique identifier of the device sending this notification.')
tptPolicyNotifyPolicyID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 12), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyPolicyID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyPolicyID.setDescription('The unique identifier of the policy causing this notification.')
tptPolicyNotifySignatureID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 13), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySignatureID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySignatureID.setDescription('The unique identifier of the signature matching the incoming data stream.')
tptPolicyNotifySegmentName = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 14), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySegmentName.setStatus('obsolete')
if mibBuilder.loadTexts: tptPolicyNotifySegmentName.setDescription('A string of the format <slot>:<index> that uniquely identifies the segment pertaining to this notification.')
tptPolicyNotifySrcNetAddr = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 15), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySrcNetAddr.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySrcNetAddr.setDescription('The network address of the source of the packet(s) triggering the policy action.')
tptPolicyNotifySrcNetAddrV6 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 128), Ipv6Address()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySrcNetAddrV6.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySrcNetAddrV6.setDescription('The IPv6 network address of the source of the packet(s) triggering the policy action.')
tptPolicyNotifySrcNetPort = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 16), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySrcNetPort.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySrcNetPort.setDescription('The network port (if applicable) of the source of the packet(s) triggering the policy action.')
tptPolicyNotifyDestNetAddr = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 17), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyDestNetAddr.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyDestNetAddr.setDescription('The network address of the destination of the packet(s) triggering the policy action.')
tptPolicyNotifyDestNetAddrV6 = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 129), Ipv6Address()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyDestNetAddrV6.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyDestNetAddrV6.setDescription('The IPv6 network address of the destination of the packet(s) triggering the policy action.')
tptPolicyNotifyDestNetPort = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 18), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyDestNetPort.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyDestNetPort.setDescription('The network port (if applicable) of the destination of the packet(s) triggering the policy action.')
tptPolicyNotifyStartTimeSec = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 19), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyStartTimeSec.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyStartTimeSec.setDescription('The time of the first policy hit, marking the start of the aggregation period for this notification (in seconds since January 1, 1970).')
tptPolicyNotifyAlertAction = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 20), PolicyAction()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyAlertAction.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyAlertAction.setDescription('The action associated with this notification: whether the affected packets were actually blocked, allowed through, or rate-limited.')
tptPolicyNotifyConfigAction = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 21), PolicyAction()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyConfigAction.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyConfigAction.setDescription('The action configured for the policy, which in some cases may differ from the action associated with this notification.')
tptPolicyNotifyComponentID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 22), PolicyComponent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyComponentID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyComponentID.setDescription('The component identifier of the policy causing this notification.')
tptPolicyNotifyHitCount = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 23), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyHitCount.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyHitCount.setDescription('The number of policy hits occurring during the aggregation period for this notification.')
tptPolicyNotifyAggregationPeriod = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 24), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyAggregationPeriod.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyAggregationPeriod.setDescription('The duration (in minutes) of the aggregation period for this notification.')
tptPolicyNotifySeverity = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 25), PolicySeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySeverity.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySeverity.setDescription('The severity of the attack for this notification.')
tptPolicyNotifyProtocol = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 26), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyProtocol.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyProtocol.setDescription('The network protocol of the packet(s) triggering the policy action.')
tptPolicyNotifyAlertTimeSec = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 27), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyAlertTimeSec.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyAlertTimeSec.setDescription('The time this alert was initiated, marking the end of the aggregation period for this notification (in seconds since January 1, 1970).')
tptPolicyNotifyAlertTimeNano = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 28), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyAlertTimeNano.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyAlertTimeNano.setDescription('The nanoseconds portion of tptPolicyNotifyAlertTimeSec.')
tptPolicyNotifyPacketTrace = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 29), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyPacketTrace.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyPacketTrace.setDescription('The value 1 if a corresponding packet trace was logged; 0 if not.')
tptPolicyNotifySequence = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 30), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySequence.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySequence.setDescription('The log file entry sequence number corresponding to this notification.')
tptPolicyNotifyTraceBucket = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 36), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyTraceBucket.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyTraceBucket.setDescription('The bucket identifier for a packet trace.')
tptPolicyNotifyTraceBegin = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 37), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyTraceBegin.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyTraceBegin.setDescription('The starting sequence number for a packet trace.')
tptPolicyNotifyTraceEnd = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 38), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyTraceEnd.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyTraceEnd.setDescription('The ending sequence number for a packet trace.')
tptPolicyNotifyMessageParams = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 39), OctetString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyMessageParams.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyMessageParams.setDescription('A string containing parameters (separated by vertical bars) matching the Message in the Digital Vaccine (the XML tag is Message).')
tptPolicyNotifyStartTimeNano = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 40), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyStartTimeNano.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyStartTimeNano.setDescription('The nanoseconds portion of tptPolicyNotifyStartTimeSec.')
tptPolicyNotifyAlertType = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 41), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyAlertType.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyAlertType.setDescription('A bit field defined as follows: 0x0001 = Alert 0x0002 = Block 0x0020 = Peer-to-peer 0x0040 = Invalid 0x0080 = Threshold 0x0100 = Management.')
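# Worked sketch for decoding the bit field documented just above. The mapping
# repeats the values from the description string; the helper name itself is
# hypothetical and not used elsewhere in this module.
_ALERT_TYPE_FLAGS = {
    0x0001: 'Alert',
    0x0002: 'Block',
    0x0020: 'Peer-to-peer',
    0x0040: 'Invalid',
    0x0080: 'Threshold',
    0x0100: 'Management',
}
def _decode_alert_type(value):
    # e.g. _decode_alert_type(0x0003) -> ['Alert', 'Block']
    return [name for bit, name in sorted(_ALERT_TYPE_FLAGS.items())
            if value & bit]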
tptPolicyNotifyInputMphy = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 57), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyInputMphy.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyInputMphy.setDescription('The physical input port of the triggering packet(s).')
tptPolicyNotifyVlanTag = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 58), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyVlanTag.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyVlanTag.setDescription('The VLAN tag of the triggering packet(s).')
tptPolicyNotifyZonePair = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 59), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyZonePair.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyZonePair.setDescription('A string of the format <in zone UUID>:<out zone UUID> that identifies the zone pair pertaining to this notification.')
tptPolicyNotifyActionSetID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 130), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyActionSetID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyActionSetID.setDescription('The action set uuid associated with this notification.')
tptPolicyNotifyRate = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 131), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyRate.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyRate.setDescription('The rate-limit, in kbps, of the action set associated with this notification.')
tptPolicyNotifyFlowControl = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 137), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyFlowControl.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyFlowControl.setDescription('The action set flow control associated with this notification.')
tptPolicyNotifyActionSetName = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 138), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyActionSetName.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyActionSetName.setDescription('The action set name associated with this notification.')
tptPolicyNotifyClientip = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 139), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyClientip.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyClientip.setDescription('The client-ip associated with this notification.')
tptPolicyNotifyMetadata = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 140), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 128))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifyMetadata.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyMetadata.setDescription('The metadata associated with this notification.')
tptPolicyNotifySslInspected = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 180), SslInspectedFlag()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyNotifySslInspected.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspected.setDescription("A flag indicating if the notification is associated with an inspected SSL data stream. This flag is only present on IPS and Quarantine events and doesn't apply to Reputation.")
tptPolicyNotifyVirtualSegment = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 182), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifyVirtualSegment.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifyVirtualSegment.setDescription('Virtual segment associated with this notification. ')
tptPolicyNotify = NotificationType((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 0, 8)).setObjects(("TPT-POLICY-MIB", "tptPolicyNotifyDeviceID"), ("TPT-POLICY-MIB", "tptPolicyNotifyPolicyID"), ("TPT-POLICY-MIB", "tptPolicyNotifySignatureID"), ("TPT-POLICY-MIB", "tptPolicyNotifyZonePair"), ("TPT-POLICY-MIB", "tptPolicyNotifyInputMphy"), ("TPT-POLICY-MIB", "tptPolicyNotifyVlanTag"), ("TPT-POLICY-MIB", "tptPolicyNotifySrcNetAddr"), ("TPT-POLICY-MIB", "tptPolicyNotifySrcNetPort"), ("TPT-POLICY-MIB", "tptPolicyNotifyDestNetAddr"), ("TPT-POLICY-MIB", "tptPolicyNotifyDestNetPort"), ("TPT-POLICY-MIB", "tptPolicyNotifyProtocol"), ("TPT-POLICY-MIB", "tptPolicyNotifyMessageParams"), ("TPT-POLICY-MIB", "tptPolicyNotifyHitCount"), ("TPT-POLICY-MIB", "tptPolicyNotifyAggregationPeriod"), ("TPT-POLICY-MIB", "tptPolicyNotifyStartTimeSec"), ("TPT-POLICY-MIB", "tptPolicyNotifyStartTimeNano"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertTimeSec"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertTimeNano"), ("TPT-POLICY-MIB", "tptPolicyNotifyPacketTrace"), ("TPT-POLICY-MIB", "tptPolicyNotifyTraceBucket"), ("TPT-POLICY-MIB", "tptPolicyNotifyTraceBegin"), ("TPT-POLICY-MIB", "tptPolicyNotifyTraceEnd"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertAction"), ("TPT-POLICY-MIB", "tptPolicyNotifyConfigAction"), ("TPT-POLICY-MIB", "tptPolicyNotifyComponentID"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertType"), ("TPT-POLICY-MIB", "tptPolicyNotifySeverity"), ("TPT-POLICY-MIB", "tptPolicyNotifySequence"), ("TPT-POLICY-MIB", "tptPolicyNotifySrcNetAddrV6"), ("TPT-POLICY-MIB", "tptPolicyNotifyDestNetAddrV6"), ("TPT-POLICY-MIB", "tptPolicyNotifyActionSetID"), ("TPT-POLICY-MIB", "tptPolicyNotifyRate"), ("TPT-POLICY-MIB", "tptPolicyNotifyFlowControl"), ("TPT-POLICY-MIB", "tptPolicyNotifyActionSetName"), ("TPT-POLICY-MIB", "tptPolicyNotifyClientip"), ("TPT-POLICY-MIB", "tptPolicyNotifyMetadata"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspected"))
if mibBuilder.loadTexts: tptPolicyNotify.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotify.setDescription('Notification: Used to inform the management station of a policy alert action (either deny or allow) resulting from a signature match.')
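# Illustrative sketch (not part of the generated MIB): emitting tptPolicyNotify
# as a bare test trap with pysnmp's sendNotification. A real device populates
# the varbinds listed above; the receiver address here is a placeholder.
def _example_send_policy_notify():
    from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
                              ContextData, NotificationType, ObjectIdentity,
                              sendNotification)
    error_indication, _, _, _ = next(sendNotification(
        SnmpEngine(),
        CommunityData('public'),
        UdpTransportTarget(('mgmt-station.example.com', 162)),
        ContextData(),
        'trap',
        NotificationType(ObjectIdentity('TPT-POLICY-MIB', 'tptPolicyNotify'))))
    if error_indication:
        raise RuntimeError(error_indication)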
class SslProtocol(TextualConvention, Integer32):
description = 'The SSL protocol version. '
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
namedValues = NamedValues(("unknown", 1), ("sslv3", 2), ("tls10", 3), ("tls11", 4), ("tls12", 5))
class SslInspEventType(TextualConvention, Integer32):
description = 'The type of SSL connection, either inbound or outbound. '
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("inbound", 1), ("outbound", 2))
class SslInspAction(TextualConvention, Integer32):
description = 'The action taken on an SSL connection. '
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3))
namedValues = NamedValues(("decrypted", 1), ("notDecrypted", 2), ("blocked", 3))
tptPolicyNotifySslInspEventType = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 190), SslInspEventType()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspEventType.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspEventType.setDescription('The SSL connection type. ')
tptPolicyNotifySslInspAction = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 191), SslInspAction()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspAction.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspAction.setDescription('The SSL connection action taken. ')
tptPolicyNotifySslInspDetails = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 192), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 255))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspDetails.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspDetails.setDescription('Free-form field that provides additional details for the action taken on an SSL connection. ')
tptPolicyNotifySslInspPolicy = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 193), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspPolicy.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspPolicy.setDescription('The SSL inspection policy. ')
tptPolicyNotifySslInspCert = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 194), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspCert.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspCert.setDescription('The certificate used to decrypt SSL traffic. ')
tptPolicyNotifySslInspCltIF = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 195), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltIF.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltIF.setDescription('The client-side interface receiving SSL traffic. ')
tptPolicyNotifySslInspCltSslVer = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 196), SslProtocol()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltSslVer.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltSslVer.setDescription('The client-side SSL protocol version. ')
tptPolicyNotifySslInspCltCrypto = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 197), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltCrypto.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspCltCrypto.setDescription('The client-side SSL crypto-suite. ')
tptPolicyNotifySslInspSrvIF = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 198), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvIF.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvIF.setDescription('The server-side interface sending SSL traffic. ')
tptPolicyNotifySslInspSrvSslVer = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 199), SslProtocol()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvSslVer.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvSslVer.setDescription('The server-side SSL protocol version. ')
tptPolicyNotifySslInspSrvCrypto = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 200), SnmpAdminString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvCrypto.setStatus('current')
if mibBuilder.loadTexts: tptPolicyNotifySslInspSrvCrypto.setDescription('The server-side SSL crypto-suite. ')
tptPolicySslInspNotify = NotificationType((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 0, 27)).setObjects(("TPT-POLICY-MIB", "tptPolicyNotifyDeviceID"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertTimeSec"), ("TPT-POLICY-MIB", "tptPolicyNotifyAlertTimeNano"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspEventType"), ("TPT-POLICY-MIB", "tptPolicyNotifySeverity"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspAction"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspDetails"), ("TPT-POLICY-MIB", "tptPolicyNotifyVirtualSegment"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspPolicy"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspCert"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspCltIF"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspCltSslVer"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspCltCrypto"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspSrvIF"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspSrvSslVer"), ("TPT-POLICY-MIB", "tptPolicyNotifySslInspSrvCrypto"), ("TPT-POLICY-MIB", "tptPolicyNotifySrcNetAddr"), ("TPT-POLICY-MIB", "tptPolicyNotifySrcNetPort"), ("TPT-POLICY-MIB", "tptPolicyNotifyDestNetAddr"), ("TPT-POLICY-MIB", "tptPolicyNotifyDestNetPort"))
if mibBuilder.loadTexts: tptPolicySslInspNotify.setStatus('current')
if mibBuilder.loadTexts: tptPolicySslInspNotify.setDescription('A notification sent when an action is taken on an SSL connection. ')
tptPolicyLogNotifyDeviceID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 121), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 40))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyLogNotifyDeviceID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotifyDeviceID.setDescription('The unique identifier of the device sending this notification.')
tptPolicyLogNotifyComponentID = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 122), PolicyComponent()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyLogNotifyComponentID.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotifyComponentID.setDescription('The type of log (alert, block, or peer) pertaining to this notification.')
tptPolicyLogNotifyNumber = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 123), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyLogNotifyNumber.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotifyNumber.setDescription('The number of policy log entries since the last SMS log file retrieval.')
tptPolicyLogNotifyTrigger = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 124), Unsigned32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyLogNotifyTrigger.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotifyTrigger.setDescription('The number of policy log entries needed to trigger this notification.')
tptPolicyLogNotifySequence = MibScalar((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 1, 125), Counter64()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tptPolicyLogNotifySequence.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotifySequence.setDescription('The current log file entry sequence number.')
tptPolicyLogNotify = NotificationType((1, 3, 6, 1, 4, 1, 10734, 3, 3, 3, 0, 19)).setObjects(("TPT-POLICY-MIB", "tptPolicyLogNotifyDeviceID"), ("TPT-POLICY-MIB", "tptPolicyLogNotifyComponentID"), ("TPT-POLICY-MIB", "tptPolicyLogNotifyNumber"), ("TPT-POLICY-MIB", "tptPolicyLogNotifyTrigger"), ("TPT-POLICY-MIB", "tptPolicyLogNotifySequence"))
if mibBuilder.loadTexts: tptPolicyLogNotify.setStatus('current')
if mibBuilder.loadTexts: tptPolicyLogNotify.setDescription('Notification: Used to inform the management station that some number of policy log entries of a particular type (alert, block, or peer) occurred since the last time the management station retrieved the corresponding log file.')
mibBuilder.exportSymbols("TPT-POLICY-MIB", tptPolicyNotifySslInspSrvSslVer=tptPolicyNotifySslInspSrvSslVer, tptPolicyLogNotifyComponentID=tptPolicyLogNotifyComponentID, tptPolicyNotifySrcNetPort=tptPolicyNotifySrcNetPort, policyCountBytes=policyCountBytes, alertSlot=alertSlot, framesBySizeEntry=framesBySizeEntry, tptPolicyNotify=tptPolicyNotify, tptPolicySslInspNotify=tptPolicySslInspNotify, alertsBySeverityTable=alertsBySeverityTable, blockSlot=blockSlot, szpInPackets=szpInPackets, alertsByProtocolTable=alertsByProtocolTable, tptPolicyNotifyClientip=tptPolicyNotifyClientip, szpPrecedence=szpPrecedence, tptPolicyNotifyActionSetID=tptPolicyNotifyActionSetID, policyCounterEntry=policyCounterEntry, tptPolicyNotifyMetadata=tptPolicyNotifyMetadata, policyPacketsDropped=policyPacketsDropped, tptPolicyNotifyInputMphy=tptPolicyNotifyInputMphy, tptPolicyNotifyDestNetAddr=tptPolicyNotifyDestNetAddr, policyHitCount=policyHitCount, tptPolicyNotifySslInspCltSslVer=tptPolicyNotifySslInspCltSslVer, policyPacketsIncoming=policyPacketsIncoming, policyDVVersion=policyDVVersion, tptPolicyNotifyTraceEnd=tptPolicyNotifyTraceEnd, policyGlobalID=policyGlobalID, policyPacketsBlocked64=policyPacketsBlocked64, packetsByProtocolTable=packetsByProtocolTable, szpUUID=szpUUID, permitPort=permitPort, portNumber=portNumber, framesByTypeEntry=framesByTypeEntry, policyDVObjs=policyDVObjs, topTenRank=topTenRank, sizeFrameCount=sizeFrameCount, p2psByZoneTable=p2psByZoneTable, permitsByZoneTable=permitsByZoneTable, tptPolicyNotifySrcNetAddr=tptPolicyNotifySrcNetAddr, portStatsTable=portStatsTable, tptPolicyNotifySslInspCltCrypto=tptPolicyNotifySslInspCltCrypto, alertsBySeverityEntry=alertsBySeverityEntry, policyPacketsBlocked=policyPacketsBlocked, policyName=policyName, p2psByZoneEntry=p2psByZoneEntry, tptPolicyNotifyProtocol=tptPolicyNotifyProtocol, tptPolicyNotifySslInspCltIF=tptPolicyNotifySslInspCltIF, alertsByZoneTable=alertsByZoneTable, szpBlocks=szpBlocks, tptPolicyNotifyRate=tptPolicyNotifyRate, PolicySeverity=PolicySeverity, tptPolicyNotifyStartTimeNano=tptPolicyNotifyStartTimeNano, topTenHitsByPolicyEntry=topTenHitsByPolicyEntry, policyNumber=policyNumber, PolicyFrameType=PolicyFrameType, tptPolicyNotifyAlertTimeNano=tptPolicyNotifyAlertTimeNano, szpOutZoneName=szpOutZoneName, policyByNumberTable=policyByNumberTable, tptPolicyLogNotifyDeviceID=tptPolicyLogNotifyDeviceID, tptPolicyNotifySslInspSrvCrypto=tptPolicyNotifySslInspSrvCrypto, szpOutZoneUUID=szpOutZoneUUID, szpInOctets=szpInOctets, PolicyComponent=PolicyComponent, packetsByProtocolEntry=packetsByProtocolEntry, policyPacketsOutgoing=policyPacketsOutgoing, p2pPort=p2pPort, framesBySizeTable=framesBySizeTable, tptPolicyNotifyMessageParams=tptPolicyNotifyMessageParams, tptPolicyNotifySslInspPolicy=tptPolicyNotifySslInspPolicy, tptPolicyNotifyZonePair=tptPolicyNotifyZonePair, tptPolicyNotifySslInspSrvIF=tptPolicyNotifySslInspSrvIF, policyPacketsInvalid=policyPacketsInvalid, tptPolicyNotifyFlowControl=tptPolicyNotifyFlowControl, tptPolicyNotifyTraceBegin=tptPolicyNotifyTraceBegin, policyByNumberEntry=policyByNumberEntry, PolicyFrameSize=PolicyFrameSize, SslInspectedFlag=SslInspectedFlag, topTenHitsByPolicyTable=topTenHitsByPolicyTable, blocksByZoneEntry=blocksByZoneEntry, SslInspEventType=SslInspEventType, zoneP2pCount=zoneP2pCount, policyPacketsInvalid64=policyPacketsInvalid64, tptPolicyNotifyConfigAction=tptPolicyNotifyConfigAction, zoneBlockCount=zoneBlockCount, permitsByZoneEntry=permitsByZoneEntry, tptPolicyNotifyDeviceID=tptPolicyNotifyDeviceID, 
tptPolicyNotifyVirtualSegment=tptPolicyNotifyVirtualSegment, tptPolicyNotifySslInspected=tptPolicyNotifySslInspected, policyPacketsTrusted64=policyPacketsTrusted64, policyPacketsPermitted64=policyPacketsPermitted64, policyPacketsOutgoing64=policyPacketsOutgoing64, tptPolicyNotifyPacketTrace=tptPolicyNotifyPacketTrace, tptPolicyNotifySignatureID=tptPolicyNotifySignatureID, policyPacketsPermitted=policyPacketsPermitted, alertsByZoneEntry=alertsByZoneEntry, blocksByZoneTable=blocksByZoneTable, protocolAlertCount=protocolAlertCount, permitSlot=permitSlot, frameSize=frameSize, packetProtocol=packetProtocol, portName=portName, PolicyProtocol=PolicyProtocol, securityZonePairTable=securityZonePairTable, szpInZoneName=szpInZoneName, PolicyAction=PolicyAction, policyPacketsRateLimited64=policyPacketsRateLimited64, tptPolicyNotifyComponentID=tptPolicyNotifyComponentID, tptPolicyNotifySslInspDetails=tptPolicyNotifySslInspDetails, tptPolicyNotifyAlertTimeSec=tptPolicyNotifyAlertTimeSec, numberName=numberName, tptPolicyLogNotifySequence=tptPolicyLogNotifySequence, policyCreationTime=policyCreationTime, tptPolicyNotifySslInspEventType=tptPolicyNotifySslInspEventType, tptPolicyLogNotifyNumber=tptPolicyLogNotifyNumber, framesByTypeTable=framesByTypeTable, tptPolicyNotifyAlertAction=tptPolicyNotifyAlertAction, tptPolicyNotifySegmentName=tptPolicyNotifySegmentName, policyDescriptiveName=policyDescriptiveName, tptPolicyNotifyTraceBucket=tptPolicyNotifyTraceBucket, alertPort=alertPort, policyCountPackets=policyCountPackets, zoneAlertCount=zoneAlertCount, tptPolicyNotifyDestNetAddrV6=tptPolicyNotifyDestNetAddrV6, SslInspAction=SslInspAction, tpt_policy=tpt_policy, alertsByProtocolEntry=alertsByProtocolEntry, policyCounterTable=policyCounterTable, szpInZoneUUID=szpInZoneUUID, policyPacketsDropped64=policyPacketsDropped64, tptPolicyLogNotify=tptPolicyLogNotify, tptPolicyNotifySeverity=tptPolicyNotifySeverity, tptPolicyLogNotifyTrigger=tptPolicyLogNotifyTrigger, policyPacketsIncoming64=policyPacketsIncoming64, PYSNMP_MODULE_ID=tpt_policy, SslProtocol=SslProtocol, typeFrameCount=typeFrameCount, alertSeverity=alertSeverity, tptPolicyNotifyAggregationPeriod=tptPolicyNotifyAggregationPeriod, szpAlerts=szpAlerts, severityAlertCount=severityAlertCount, portVlanTranslations=portVlanTranslations, tptPolicyNotifyVlanTag=tptPolicyNotifyVlanTag, tptPolicyNotifyHitCount=tptPolicyNotifyHitCount, zonePermitCount=zonePermitCount, tptPolicyNotifyAlertType=tptPolicyNotifyAlertType, szpPermits=szpPermits, tptPolicyNotifyActionSetName=tptPolicyNotifyActionSetName, tptPolicyNotifySslInspCert=tptPolicyNotifySslInspCert, securityZonePairEntry=securityZonePairEntry, tptPolicyNotifyStartTimeSec=tptPolicyNotifyStartTimeSec, tptPolicyNotifyPolicyID=tptPolicyNotifyPolicyID, szpName=szpName, tptPolicyNotifyDestNetPort=tptPolicyNotifyDestNetPort, protocolPacketCount=protocolPacketCount, numberDesc=numberDesc, tptPolicyNotifySrcNetAddrV6=tptPolicyNotifySrcNetAddrV6, tptPolicyNotifySequence=tptPolicyNotifySequence, policyUUID=policyUUID, alertProtocol=alertProtocol, frameType=frameType, p2pSlot=p2pSlot, blockPort=blockPort, portStatsEntry=portStatsEntry, tptPolicyNotifySslInspAction=tptPolicyNotifySslInspAction)
"""
Module for the DomainMatrix class.
A DomainMatrix represents a matrix with elements that are in a particular
Domain. Each DomainMatrix internally wraps a DDM which is used for the
lower-level operations. The idea is that the DomainMatrix class provides the
convenience routines for converting between Expr and the poly domains as well
as unifying matrices with different domains.
"""
from collections import Counter
from functools import reduce
from typing import Union as tUnion, Tuple as tTuple
from sympy.utilities.decorator import doctest_depends_on
from sympy.core.sympify import _sympify
from ..domains import Domain
from ..constructor import construct_domain
from .exceptions import (
DMFormatError,
DMBadInputError,
DMShapeError,
DMDomainError,
DMNotAField,
DMNonSquareMatrixError,
DMNonInvertibleMatrixError
)
from .domainscalar import DomainScalar
from sympy.polys.domains import ZZ, EXRAW, QQ
from sympy.polys.densearith import dup_mul
from sympy.polys.densebasic import dup_convert
from sympy.polys.densetools import (
dup_mul_ground,
dup_quo_ground,
dup_content,
dup_clear_denoms,
dup_primitive,
dup_transform,
)
from sympy.polys.factortools import dup_factor_list
from sympy.polys.polyutils import _sort_factors
from .ddm import DDM
from .sdm import SDM
from .dfm import DFM
from .rref import _dm_rref, _dm_rref_den
def DM(rows, domain):
"""Convenient alias for DomainMatrix.from_list
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> DM([[1, 2], [3, 4]], ZZ)
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
See Also
========
DomainMatrix.from_list
"""
return DomainMatrix.from_list(rows, domain)
class DomainMatrix:
r"""
Associate Matrix with :py:class:`~.Domain`
Explanation
===========
DomainMatrix uses :py:class:`~.Domain` for its internal representation,
which makes it faster than the SymPy Matrix class (currently) for many
common operations, although this advantage means it is not entirely
compatible with Matrix. A DomainMatrix is analogous to a numpy array
with a "dtype".
In the DomainMatrix, each element has a domain such as :ref:`ZZ`
or :ref:`QQ(a)`.
Examples
========
Creating a DomainMatrix from the existing Matrix class:
>>> from sympy import Matrix
>>> from sympy.polys.matrices import DomainMatrix
>>> Matrix1 = Matrix([
... [1, 2],
... [3, 4]])
>>> A = DomainMatrix.from_Matrix(Matrix1)
>>> A
DomainMatrix({0: {0: 1, 1: 2}, 1: {0: 3, 1: 4}}, (2, 2), ZZ)
Directly forming a DomainMatrix:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
See Also
========
DDM
SDM
Domain
Poly
"""
rep: tUnion[SDM, DDM, DFM]
shape: tTuple[int, int]
domain: Domain
def __new__(cls, rows, shape, domain, *, fmt=None):
"""
Creates a :py:class:`~.DomainMatrix`.
Parameters
==========
rows : Represents elements of DomainMatrix as list of lists
shape : Represents dimension of DomainMatrix
domain : Represents :py:class:`~.Domain` of DomainMatrix
Raises
======
TypeError
If any of rows, shape or domain is not provided
"""
if isinstance(rows, (DDM, SDM, DFM)):
raise TypeError("Use from_rep to initialise from SDM/DDM")
elif isinstance(rows, list):
rep = DDM(rows, shape, domain)
elif isinstance(rows, dict):
rep = SDM(rows, shape, domain)
else:
msg = "Input should be list-of-lists or dict-of-dicts"
raise TypeError(msg)
if fmt is not None:
if fmt == 'sparse':
rep = rep.to_sdm()
elif fmt == 'dense':
rep = rep.to_ddm()
else:
raise ValueError("fmt should be 'sparse' or 'dense'")
# Use python-flint for dense matrices if possible
if rep.fmt == 'dense' and DFM._supports_domain(domain):
rep = rep.to_dfm()
return cls.from_rep(rep)
def __reduce__(self):
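# Pickle support: serialize as (elements, shape, domain) so that
# unpickling rebuilds the matrix through DomainMatrix(...).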
rep = self.rep
if rep.fmt == 'dense':
arg = self.to_list()
elif rep.fmt == 'sparse':
arg = dict(rep)
else:
raise RuntimeError # pragma: no cover
args = (arg, rep.shape, rep.domain)
return (self.__class__, args)
def __getitem__(self, key):
i, j = key
m, n = self.shape
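# Scalar indexing like A[0, 0] returns a DomainScalar; slice indexing
# like A[0, :] or A[:, 1] returns a new DomainMatrix for the sub-slice.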
if not (isinstance(i, slice) or isinstance(j, slice)):
return DomainScalar(self.rep.getitem(i, j), self.domain)
if not isinstance(i, slice):
if not -m <= i < m:
raise IndexError("Row index out of range")
i = i % m
i = slice(i, i+1)
if not isinstance(j, slice):
if not -n <= j < n:
raise IndexError("Column index out of range")
j = j % n
j = slice(j, j+1)
return self.from_rep(self.rep.extract_slice(i, j))
def getitem_sympy(self, i, j):
return self.domain.to_sympy(self.rep.getitem(i, j))
def extract(self, rowslist, colslist):
return self.from_rep(self.rep.extract(rowslist, colslist))
def __setitem__(self, key, value):
i, j = key
if not self.domain.of_type(value):
raise TypeError
if isinstance(i, int) and isinstance(j, int):
self.rep.setitem(i, j, value)
else:
raise NotImplementedError
@classmethod
def from_rep(cls, rep):
"""Create a new DomainMatrix efficiently from DDM/SDM.
Examples
========
Create a :py:class:`~.DomainMatrix` with a dense internal
representation as :py:class:`~.DDM`:
>>> from sympy.polys.domains import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.polys.matrices.ddm import DDM
>>> drep = DDM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> dM = DomainMatrix.from_rep(drep)
>>> dM
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
Create a :py:class:`~.DomainMatrix` with a sparse internal
representation as :py:class:`~.SDM`:
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.polys.matrices.sdm import SDM
>>> from sympy import ZZ
>>> drep = SDM({0:{1:ZZ(1)},1:{0:ZZ(2)}}, (2, 2), ZZ)
>>> dM = DomainMatrix.from_rep(drep)
>>> dM
DomainMatrix({0: {1: 1}, 1: {0: 2}}, (2, 2), ZZ)
Parameters
==========
rep: SDM or DDM
The internal sparse or dense representation of the matrix.
Returns
=======
DomainMatrix
A :py:class:`~.DomainMatrix` wrapping *rep*.
Notes
=====
This takes ownership of rep as its internal representation. If rep is
being mutated elsewhere then a copy should be provided to
``from_rep``. Only minimal verification or checking is done on *rep*
as this is supposed to be an efficient internal routine.
"""
if not (isinstance(rep, (DDM, SDM)) or (DFM is not None and isinstance(rep, DFM))):
raise TypeError("rep should be of type DDM or SDM")
self = super().__new__(cls)
self.rep = rep
self.shape = rep.shape
self.domain = rep.domain
return self
@classmethod
def from_list(cls, rows, domain):
r"""
Convert a list of lists into a DomainMatrix
Parameters
==========
rows: list of lists
Each element of the inner lists should be either the single arg,
or tuple of args, that would be passed to the domain constructor
in order to form an element of the domain. See examples.
Returns
=======
DomainMatrix containing elements defined in rows
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import FF, QQ, ZZ
>>> A = DomainMatrix.from_list([[1, 0, 1], [0, 0, 1]], ZZ)
>>> A
DomainMatrix([[1, 0, 1], [0, 0, 1]], (2, 3), ZZ)
>>> B = DomainMatrix.from_list([[1, 0, 1], [0, 0, 1]], FF(7))
>>> B
DomainMatrix([[1 mod 7, 0 mod 7, 1 mod 7], [0 mod 7, 0 mod 7, 1 mod 7]], (2, 3), GF(7))
>>> C = DomainMatrix.from_list([[(1, 2), (3, 1)], [(1, 4), (5, 1)]], QQ)
>>> C
DomainMatrix([[1/2, 3], [1/4, 5]], (2, 2), QQ)
See Also
========
from_list_sympy
"""
nrows = len(rows)
ncols = 0 if not nrows else len(rows[0])
conv = lambda e: domain(*e) if isinstance(e, tuple) else domain(e)
domain_rows = [[conv(e) for e in row] for row in rows]
return DomainMatrix(domain_rows, (nrows, ncols), domain)
@classmethod
def from_list_sympy(cls, nrows, ncols, rows, **kwargs):
r"""
Convert a list of lists of Expr into a DomainMatrix using construct_domain
Parameters
==========
nrows: number of rows
ncols: number of columns
rows: list of lists
Returns
=======
DomainMatrix containing elements of rows
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.abc import x, y, z
>>> A = DomainMatrix.from_list_sympy(1, 3, [[x, y, z]])
>>> A
DomainMatrix([[x, y, z]], (1, 3), ZZ[x,y,z])
See Also
========
sympy.polys.constructor.construct_domain, from_dict_sympy
"""
assert len(rows) == nrows
assert all(len(row) == ncols for row in rows)
items_sympy = [_sympify(item) for row in rows for item in row]
domain, items_domain = cls.get_domain(items_sympy, **kwargs)
domain_rows = [[items_domain[ncols*r + c] for c in range(ncols)] for r in range(nrows)]
return DomainMatrix(domain_rows, (nrows, ncols), domain)
@classmethod
def from_dict_sympy(cls, nrows, ncols, elemsdict, **kwargs):
"""
Parameters
==========
nrows: number of rows
ncols: number of cols
elemsdict: dict of dicts containing non-zero elements of the DomainMatrix
Returns
=======
DomainMatrix containing elements of elemsdict
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy.abc import x,y,z
>>> elemsdict = {0: {0:x}, 1:{1: y}, 2: {2: z}}
>>> A = DomainMatrix.from_dict_sympy(3, 3, elemsdict)
>>> A
DomainMatrix({0: {0: x}, 1: {1: y}, 2: {2: z}}, (3, 3), ZZ[x,y,z])
See Also
========
from_list_sympy
"""
if not all(0 <= r < nrows for r in elemsdict):
raise DMBadInputError("Row out of range")
if not all(0 <= c < ncols for row in elemsdict.values() for c in row):
raise DMBadInputError("Column out of range")
items_sympy = [_sympify(item) for row in elemsdict.values() for item in row.values()]
domain, items_domain = cls.get_domain(items_sympy, **kwargs)
idx = 0
items_dict = {}
for i, row in elemsdict.items():
items_dict[i] = {}
for j in row:
items_dict[i][j] = items_domain[idx]
idx += 1
return DomainMatrix(items_dict, (nrows, ncols), domain)
@classmethod
def from_Matrix(cls, M, fmt='sparse',**kwargs):
r"""
Convert Matrix to DomainMatrix
Parameters
==========
M: Matrix
Returns
=======
DomainMatrix with the same elements as M
Examples
========
>>> from sympy import Matrix
>>> from sympy.polys.matrices import DomainMatrix
>>> M = Matrix([
... [1.0, 3.4],
... [2.4, 1]])
>>> A = DomainMatrix.from_Matrix(M)
>>> A
DomainMatrix({0: {0: 1.0, 1: 3.4}, 1: {0: 2.4, 1: 1.0}}, (2, 2), RR)
We can keep internal representation as ddm using fmt='dense'
>>> from sympy import Matrix, QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix.from_Matrix(Matrix([[QQ(1, 2), QQ(3, 4)], [QQ(0, 1), QQ(0, 1)]]), fmt='dense')
>>> A.rep
[[1/2, 3/4], [0, 0]]
See Also
========
Matrix
"""
if fmt == 'dense':
return cls.from_list_sympy(*M.shape, M.tolist(), **kwargs)
return cls.from_dict_sympy(*M.shape, M.todod(), **kwargs)
@classmethod
def get_domain(cls, items_sympy, **kwargs):
K, items_K = construct_domain(items_sympy, **kwargs)
return K, items_K
def choose_domain(self, **opts):
"""Convert to a domain found by :func:`~.construct_domain`.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[1, 2], [3, 4]], ZZ)
>>> M
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
>>> M.choose_domain(field=True)
DomainMatrix([[1, 2], [3, 4]], (2, 2), QQ)
>>> from sympy.abc import x
>>> M = DM([[1, x], [x**2, x**3]], ZZ[x])
>>> M.choose_domain(field=True).domain
ZZ(x)
Keyword arguments are passed to :func:`~.construct_domain`.
See Also
========
construct_domain
convert_to
"""
elements, data = self.to_sympy().to_flat_nz()
dom, elements_dom = construct_domain(elements, **opts)
return self.from_flat_nz(elements_dom, data, dom)
def copy(self):
return self.from_rep(self.rep.copy())
def convert_to(self, K):
r"""
Change the domain of DomainMatrix to desired domain or field
Parameters
==========
K : Represents the desired domain or field.
Alternatively, ``None`` may be passed, in which case this method
just returns a copy of this DomainMatrix.
Returns
=======
DomainMatrix
DomainMatrix with the desired domain or field
Examples
========
>>> from sympy import ZZ, ZZ_I
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.convert_to(ZZ_I)
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ_I)
"""
if K == self.domain:
return self.copy()
rep = self.rep
# The DFM, DDM and SDM types do not do any implicit conversions so we
# manage switching between DDM and DFM here.
if rep.is_DFM and not DFM._supports_domain(K):
rep_K = rep.to_ddm().convert_to(K)
elif rep.is_DDM and DFM._supports_domain(K):
rep_K = rep.convert_to(K).to_dfm()
else:
rep_K = rep.convert_to(K)
return self.from_rep(rep_K)
def to_sympy(self):
return self.convert_to(EXRAW)
def to_field(self):
r"""
Returns a DomainMatrix with the appropriate field
Returns
=======
DomainMatrix
DomainMatrix with the appropriate field
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_field()
DomainMatrix([[1, 2], [3, 4]], (2, 2), QQ)
"""
K = self.domain.get_field()
return self.convert_to(K)
def to_sparse(self):
"""
Return a sparse DomainMatrix representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> A.rep
[[1, 0], [0, 2]]
>>> B = A.to_sparse()
>>> B.rep
{0: {0: 1}, 1: {1: 2}}
"""
if self.rep.fmt == 'sparse':
return self
return self.from_rep(self.rep.to_sdm())
def to_dense(self):
"""
Return a dense DomainMatrix representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix({0: {0: 1}, 1: {1: 2}}, (2, 2), QQ)
>>> A.rep
{0: {0: 1}, 1: {1: 2}}
>>> B = A.to_dense()
>>> B.rep
[[1, 0], [0, 2]]
"""
rep = self.rep
if rep.fmt == 'dense':
return self
return self.from_rep(rep.to_dfm_or_ddm())
def to_ddm(self):
"""
Return a :class:`~.DDM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix({0: {0: 1}, 1: {1: 2}}, (2, 2), QQ)
>>> ddm = A.to_ddm()
>>> ddm
[[1, 0], [0, 2]]
>>> type(ddm)
<class 'sympy.polys.matrices.ddm.DDM'>
See Also
========
to_sdm
to_dense
sympy.polys.matrices.ddm.DDM.to_sdm
"""
return self.rep.to_ddm()
def to_sdm(self):
"""
Return a :class:`~.SDM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> sdm = A.to_sdm()
>>> sdm
{0: {0: 1}, 1: {1: 2}}
>>> type(sdm)
<class 'sympy.polys.matrices.sdm.SDM'>
See Also
========
to_ddm
to_sparse
sympy.polys.matrices.sdm.SDM.to_ddm
"""
return self.rep.to_sdm()
@doctest_depends_on(ground_types=['flint'])
def to_dfm(self):
"""
Return a :class:`~.DFM` representation of *self*.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> dfm = A.to_dfm()
>>> dfm
[[1, 0], [0, 2]]
>>> type(dfm)
<class 'sympy.polys.matrices._dfm.DFM'>
See Also
========
to_ddm
to_dense
DFM
"""
return self.rep.to_dfm()
@doctest_depends_on(ground_types=['flint'])
def to_dfm_or_ddm(self):
"""
Return a :class:`~.DFM` or :class:`~.DDM` representation of *self*.
Explanation
===========
The :class:`~.DFM` representation can only be used if the ground types
are ``flint`` and the ground domain is supported by ``python-flint``.
This method will return a :class:`~.DFM` representation if possible,
but will return a :class:`~.DDM` representation otherwise.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> A = DomainMatrix([[1, 0],[0, 2]], (2, 2), QQ)
>>> dfm = A.to_dfm_or_ddm()
>>> dfm
[[1, 0], [0, 2]]
>>> type(dfm) # Depends on the ground domain and ground types
<class 'sympy.polys.matrices._dfm.DFM'>
See Also
========
to_ddm: Always return a :class:`~.DDM` representation.
to_dfm: Returns a :class:`~.DFM` representation or raise an error.
to_dense: Convert internally to a :class:`~.DFM` or :class:`~.DDM`
DFM: The :class:`~.DFM` dense FLINT matrix representation.
DDM: The Python :class:`~.DDM` dense domain matrix representation.
"""
return self.rep.to_dfm_or_ddm()
@classmethod
def _unify_domain(cls, *matrices):
"""Convert matrices to a common domain"""
domains = {matrix.domain for matrix in matrices}
if len(domains) == 1:
return matrices
domain = reduce(lambda x, y: x.unify(y), domains)
return tuple(matrix.convert_to(domain) for matrix in matrices)
@classmethod
def _unify_fmt(cls, *matrices, fmt=None):
"""Convert matrices to the same format.
If all matrices have the same format, then return unmodified.
Otherwise convert both to the preferred format given as *fmt* which
should be 'dense' or 'sparse'.
"""
formats = {matrix.rep.fmt for matrix in matrices}
if len(formats) == 1:
return matrices
if fmt == 'sparse':
return tuple(matrix.to_sparse() for matrix in matrices)
elif fmt == 'dense':
return tuple(matrix.to_dense() for matrix in matrices)
else:
raise ValueError("fmt should be 'sparse' or 'dense'")
def unify(self, *others, fmt=None):
"""
Unifies the domains and the format of self and other
matrices.
Parameters
==========
others : DomainMatrix
fmt: string 'dense', 'sparse' or `None` (default)
The preferred format to convert to if self and other are not
already in the same format. If `None` or not specified then no
conversion is performed.
Returns
=======
Tuple[DomainMatrix]
Matrices with unified domain and format
Examples
========
Unify the domain of DomainMatrix that have different domains:
>>> from sympy import ZZ, QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)]], (1, 2), ZZ)
>>> B = DomainMatrix([[QQ(1, 2), QQ(2)]], (1, 2), QQ)
>>> Aq, Bq = A.unify(B)
>>> Aq
DomainMatrix([[1, 2]], (1, 2), QQ)
>>> Bq
DomainMatrix([[1/2, 2]], (1, 2), QQ)
Unify the format (dense or sparse):
>>> A = DomainMatrix([[ZZ(1), ZZ(2)]], (1, 2), ZZ)
>>> B = DomainMatrix({0:{0: ZZ(1)}}, (2, 2), ZZ)
>>> B.rep
{0: {0: 1}}
>>> A2, B2 = A.unify(B, fmt='dense')
>>> B2.rep
[[1, 0], [0, 0]]
See Also
========
convert_to, to_dense, to_sparse
"""
matrices = (self,) + others
matrices = DomainMatrix._unify_domain(*matrices)
if fmt is not None:
matrices = DomainMatrix._unify_fmt(*matrices, fmt=fmt)
return matrices
def to_Matrix(self):
r"""
Convert DomainMatrix to Matrix
Returns
=======
Matrix
MutableDenseMatrix for the DomainMatrix
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_Matrix()
Matrix([
[1, 2],
[3, 4]])
See Also
========
from_Matrix
"""
from sympy.matrices.dense import MutableDenseMatrix
# XXX: If the internal representation of RepMatrix changes then this
# might need to be changed also.
if self.domain in (ZZ, QQ, EXRAW):
if self.rep.fmt == "sparse":
rep = self.copy()
else:
rep = self.to_sparse()
else:
rep = self.convert_to(EXRAW).to_sparse()
return MutableDenseMatrix._fromrep(rep)
def to_list(self):
"""
Convert :class:`DomainMatrix` to list of lists.
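Examples
========
Convert a matrix over ZZ to a plain list of lists of domain elements:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> DM([[1, 2], [3, 4]], ZZ).to_list()
[[1, 2], [3, 4]]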
See Also
========
from_list
to_list_flat
to_flat_nz
to_dok
"""
return self.rep.to_list()
def to_list_flat(self):
"""
Convert :class:`DomainMatrix` to flat list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.to_list_flat()
[1, 2, 3, 4]
See Also
========
from_list_flat
to_list
to_flat_nz
to_dok
"""
return self.rep.to_list_flat()
@classmethod
def from_list_flat(cls, elements, shape, domain):
"""
Create :class:`DomainMatrix` from flat list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> element_list = [ZZ(1), ZZ(2), ZZ(3), ZZ(4)]
>>> A = DomainMatrix.from_list_flat(element_list, (2, 2), ZZ)
>>> A
DomainMatrix([[1, 2], [3, 4]], (2, 2), ZZ)
>>> A == A.from_list_flat(A.to_list_flat(), A.shape, A.domain)
True
See Also
========
to_list_flat
"""
ddm = DDM.from_list_flat(elements, shape, domain)
return cls.from_rep(ddm.to_dfm_or_ddm())
def to_flat_nz(self):
"""
Convert :class:`DomainMatrix` to list of nonzero elements and data.
Explanation
===========
Returns a tuple ``(elements, data)`` where ``elements`` is a list of
elements of the matrix with zeros possibly excluded. The matrix can be
reconstructed by passing these to :meth:`from_flat_nz`. The idea is to
be able to modify a flat list of the elements and then create a new
matrix of the same shape with the modified elements in the same
positions.
The format of ``data`` differs depending on whether the underlying
representation is dense or sparse but either way it represents the
positions of the elements in the list in a way that
:meth:`from_flat_nz` can use to reconstruct the matrix. The
:meth:`from_flat_nz` method should be called on the same
:class:`DomainMatrix` that was used to call :meth:`to_flat_nz`.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> elements, data = A.to_flat_nz()
>>> elements
[1, 2, 3, 4]
>>> A == A.from_flat_nz(elements, data, A.domain)
True
Create a matrix with the elements doubled:
>>> elements_doubled = [2*x for x in elements]
>>> A2 = A.from_flat_nz(elements_doubled, data, A.domain)
>>> A2 == 2*A
True
See Also
========
from_flat_nz
"""
return self.rep.to_flat_nz()
def from_flat_nz(self, elements, data, domain):
"""
Reconstruct :class:`DomainMatrix` after calling :meth:`to_flat_nz`.
See :meth:`to_flat_nz` for explanation.
See Also
========
to_flat_nz
"""
rep = self.rep.from_flat_nz(elements, data, domain)
return self.from_rep(rep)
def to_dok(self):
"""
Convert :class:`DomainMatrix` to dictionary of keys (dok) format.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(0)],
... [ZZ(0), ZZ(4)]], (2, 2), ZZ)
>>> A.to_dok()
{(0, 0): 1, (1, 1): 4}
The matrix can be reconstructed by calling :meth:`from_dok` although
the reconstructed matrix will always be in sparse format:
>>> A.to_sparse() == A.from_dok(A.to_dok(), A.shape, A.domain)
True
See Also
========
from_dok
to_list
to_list_flat
to_flat_nz
"""
return self.rep.to_dok()
@classmethod
def from_dok(cls, dok, shape, domain):
"""
Create :class:`DomainMatrix` from dictionary of keys (dok) format.
See :meth:`to_dok` for explanation.
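Examples
========
Build a sparse matrix from a dict mapping (row, col) to value:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> DomainMatrix.from_dok({(0, 0): ZZ(1), (1, 1): ZZ(4)}, (2, 2), ZZ)
DomainMatrix({0: {0: 1}, 1: {1: 4}}, (2, 2), ZZ)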
See Also
========
to_dok
"""
return cls.from_rep(SDM.from_dok(dok, shape, domain))
def nnz(self):
"""
Number of nonzero elements in the matrix.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[1, 0], [0, 4]], ZZ)
>>> A.nnz()
2
"""
return self.rep.nnz()
def __repr__(self):
return 'DomainMatrix(%s, %r, %r)' % (str(self.rep), self.shape, self.domain)
def transpose(self):
"""Matrix transpose of ``self``"""
return self.from_rep(self.rep.transpose())
def flat(self):
rows, cols = self.shape
return [self[i,j].element for i in range(rows) for j in range(cols)]
@property
def is_zero_matrix(self):
return self.rep.is_zero_matrix()
@property
def is_upper(self):
"""
Says whether this matrix is upper-triangular. True can be returned
even if the matrix is not square.
"""
return self.rep.is_upper()
@property
def is_lower(self):
"""
Says whether this matrix is lower-triangular. True can be returned
even if the matrix is not square.
"""
return self.rep.is_lower()
@property
def is_diagonal(self):
"""
True if the matrix is diagonal.
Can return true for non-square matrices. A matrix is diagonal if
``M[i,j] == 0`` whenever ``i != j``.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[ZZ(1), ZZ(0)], [ZZ(0), ZZ(1)]], ZZ)
>>> M.is_diagonal
True
See Also
========
is_upper
is_lower
is_square
diagonal
"""
return self.rep.is_diagonal()
def diagonal(self):
"""
Get the diagonal entries of the matrix as a list.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> M = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> M.diagonal()
[1, 4]
See Also
========
is_diagonal
diag
"""
return self.rep.diagonal()
@property
def is_square(self):
"""
True if the matrix is square.
"""
return self.shape[0] == self.shape[1]
def rank(self):
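# The rank is the number of pivots in the RREF; e.g. DM([[1, 2], [2, 4]], ZZ)
# has rank 1 since its rows are linearly dependent.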
rref, pivots = self.rref()
return len(pivots)
def hstack(A, *B):
r"""Horizontally stack the given matrices.
Parameters
==========
B: DomainMatrix
Matrices to stack horizontally.
Returns
=======
DomainMatrix
DomainMatrix by stacking horizontally.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([[ZZ(5), ZZ(6)], [ZZ(7), ZZ(8)]], (2, 2), ZZ)
>>> A.hstack(B)
DomainMatrix([[1, 2, 5, 6], [3, 4, 7, 8]], (2, 4), ZZ)
>>> C = DomainMatrix([[ZZ(9), ZZ(10)], [ZZ(11), ZZ(12)]], (2, 2), ZZ)
>>> A.hstack(B, C)
DomainMatrix([[1, 2, 5, 6, 9, 10], [3, 4, 7, 8, 11, 12]], (2, 6), ZZ)
See Also
========
unify
"""
A, *B = A.unify(*B, fmt=A.rep.fmt)
return DomainMatrix.from_rep(A.rep.hstack(*(Bk.rep for Bk in B)))
def vstack(A, *B):
r"""Vertically stack the given matrices.
Parameters
==========
B: DomainMatrix
Matrices to stack vertically.
Returns
=======
DomainMatrix
DomainMatrix by stacking vertically.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([[ZZ(5), ZZ(6)], [ZZ(7), ZZ(8)]], (2, 2), ZZ)
>>> A.vstack(B)
DomainMatrix([[1, 2], [3, 4], [5, 6], [7, 8]], (4, 2), ZZ)
>>> C = DomainMatrix([[ZZ(9), ZZ(10)], [ZZ(11), ZZ(12)]], (2, 2), ZZ)
>>> A.vstack(B, C)
DomainMatrix([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]], (6, 2), ZZ)
See Also
========
unify
"""
A, *B = A.unify(*B, fmt='dense')
return DomainMatrix.from_rep(A.rep.vstack(*(Bk.rep for Bk in B)))
def applyfunc(self, func, domain=None):
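# Apply func to every element, e.g. M.applyfunc(lambda e: 2*e).
# Pass domain explicitly if func maps elements out of the current domain.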
if domain is None:
domain = self.domain
return self.from_rep(self.rep.applyfunc(func, domain))
def __add__(A, B):
if not isinstance(B, DomainMatrix):
return NotImplemented
A, B = A.unify(B, fmt='dense')
return A.add(B)
def __sub__(A, B):
if not isinstance(B, DomainMatrix):
return NotImplemented
A, B = A.unify(B, fmt='dense')
return A.sub(B)
def __neg__(A):
return A.neg()
def __mul__(A, B):
"""A * B"""
if isinstance(B, DomainMatrix):
A, B = A.unify(B, fmt='dense')
return A.matmul(B)
elif B in A.domain:
return A.scalarmul(B)
elif isinstance(B, DomainScalar):
A, B = A.unify(B)
return A.scalarmul(B.element)
else:
return NotImplemented
def __rmul__(A, B):
if B in A.domain:
return A.rscalarmul(B)
elif isinstance(B, DomainScalar):
A, B = A.unify(B)
return A.rscalarmul(B.element)
else:
return NotImplemented
def __pow__(A, n):
"""A ** n"""
if not isinstance(n, int):
return NotImplemented
return A.pow(n)
def _check(a, op, b, ashape, bshape):
if a.domain != b.domain:
msg = "Domain mismatch: %s %s %s" % (a.domain, op, b.domain)
raise DMDomainError(msg)
if ashape != bshape:
msg = "Shape mismatch: %s %s %s" % (a.shape, op, b.shape)
raise DMShapeError(msg)
if a.rep.fmt != b.rep.fmt:
msg = "Format mismatch: %s %s %s" % (a.rep.fmt, op, b.rep.fmt)
raise DMFormatError(msg)
if type(a.rep) != type(b.rep):
msg = "Type mismatch: %s %s %s" % (type(a.rep), op, type(b.rep))
raise DMFormatError(msg)
def add(A, B):
r"""
Adds two DomainMatrix matrices of the same Domain
Parameters
==========
A, B: DomainMatrix
matrices to add
Returns
=======
DomainMatrix
DomainMatrix after Addition
Raises
======
DMShapeError
If the dimensions of the two DomainMatrix are not equal
ValueError
If the domains of the two DomainMatrix are not the same
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(4), ZZ(3)],
... [ZZ(2), ZZ(1)]], (2, 2), ZZ)
>>> A.add(B)
DomainMatrix([[5, 5], [5, 5]], (2, 2), ZZ)
See Also
========
sub, matmul
"""
A._check('+', B, A.shape, B.shape)
return A.from_rep(A.rep.add(B.rep))
def sub(A, B):
r"""
Subtracts two DomainMatrix matrices of the same Domain
Parameters
==========
A, B: DomainMatrix
matrices to subtract
Returns
=======
DomainMatrix
DomainMatrix after Subtraction
Raises
======
DMShapeError
If the dimensions of the two DomainMatrix are not equal
ValueError
If the domains of the two DomainMatrix are not the same
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(4), ZZ(3)],
... [ZZ(2), ZZ(1)]], (2, 2), ZZ)
>>> A.sub(B)
DomainMatrix([[-3, -1], [1, 3]], (2, 2), ZZ)
See Also
========
add, matmul
"""
A._check('-', B, A.shape, B.shape)
return A.from_rep(A.rep.sub(B.rep))
def neg(A):
r"""
Returns the negative of DomainMatrix
Parameters
==========
A : Represents a DomainMatrix
Returns
=======
DomainMatrix
DomainMatrix after Negation
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.neg()
DomainMatrix([[-1, -2], [-3, -4]], (2, 2), ZZ)
"""
return A.from_rep(A.rep.neg())
def mul(A, b):
r"""
Performs elementwise (term-by-term) multiplication of the
DomainMatrix by the scalar *b*.
Parameters
==========
A: DomainMatrix
b: DomainElement
the scalar to multiply each element by
Returns
=======
DomainMatrix
DomainMatrix with each element multiplied by ``b``
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> b = ZZ(2)
>>> A.mul(b)
DomainMatrix([[2, 4], [6, 8]], (2, 2), ZZ)
See Also
========
matmul
"""
return A.from_rep(A.rep.mul(b))
def rmul(A, b):
return A.from_rep(A.rep.rmul(b))
def matmul(A, B):
r"""
Performs matrix multiplication of two DomainMatrix matrices
Parameters
==========
A, B: DomainMatrix
to multiply
Returns
=======
DomainMatrix
DomainMatrix after multiplication
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(1), ZZ(1)],
... [ZZ(0), ZZ(1)]], (2, 2), ZZ)
>>> A.matmul(B)
DomainMatrix([[1, 3], [3, 7]], (2, 2), ZZ)
See Also
========
mul, pow, add, sub
"""
A._check('*', B, A.shape[1], B.shape[0])
return A.from_rep(A.rep.matmul(B.rep))
def _scalarmul(A, lamda, reverse):
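# Fast paths: multiplying by zero gives the zero matrix and by one a
# copy; otherwise multiply elementwise on the left or right.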
if lamda == A.domain.zero:
return DomainMatrix.zeros(A.shape, A.domain)
elif lamda == A.domain.one:
return A.copy()
elif reverse:
return A.rmul(lamda)
else:
return A.mul(lamda)
def scalarmul(A, lamda):
return A._scalarmul(lamda, reverse=False)
def rscalarmul(A, lamda):
return A._scalarmul(lamda, reverse=True)
def mul_elementwise(A, B):
assert A.domain == B.domain
return A.from_rep(A.rep.mul_elementwise(B.rep))
def __truediv__(A, lamda):
""" Method for Scalar Division"""
if isinstance(lamda, int) or ZZ.of_type(lamda):
lamda = DomainScalar(ZZ(lamda), ZZ)
elif A.domain.is_Field and lamda in A.domain:
K = A.domain
lamda = DomainScalar(K.convert(lamda), K)
if not isinstance(lamda, DomainScalar):
return NotImplemented
A, lamda = A.to_field().unify(lamda)
if lamda.element == lamda.domain.zero:
raise ZeroDivisionError
if lamda.element == lamda.domain.one:
return A
return A.mul(1 / lamda.element)
def pow(A, n):
r"""
Computes A**n
Parameters
==========
A : DomainMatrix
n : exponent for A
Returns
=======
DomainMatrix
DomainMatrix on computing A**n
Raises
======
NotImplementedError
if n is negative.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(1)],
... [ZZ(0), ZZ(1)]], (2, 2), ZZ)
>>> A.pow(2)
DomainMatrix([[1, 2], [0, 1]], (2, 2), ZZ)
See Also
========
matmul
"""
nrows, ncols = A.shape
if nrows != ncols:
raise DMNonSquareMatrixError('Power of a nonsquare matrix')
if n < 0:
raise NotImplementedError('Negative powers')
elif n == 0:
return A.eye(nrows, A.domain)
elif n == 1:
return A
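# Binary exponentiation: odd n recurses as A * A**(n - 1), even n as
# (A**(n//2))**2, so only O(log(n)) matrix multiplications are needed.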
elif n % 2 == 1:
return A * A**(n - 1)
else:
sqrtAn = A ** (n // 2)
return sqrtAn * sqrtAn
def scc(self):
"""Compute the strongly connected components of a DomainMatrix
Explanation
===========
A square matrix can be considered as the adjacency matrix for a
directed graph where the row and column indices are the vertices. In
this graph there is an edge from vertex ``i`` to vertex ``j`` if
``M[i, j]`` is nonzero. This routine computes the strongly connected
components of that graph which are subsets of the rows and columns that
are connected by some nonzero element of the matrix. The strongly
connected components are useful because many operations such as the
determinant can be computed by working with the submatrices
corresponding to each component.
Examples
========
Find the strongly connected components of a matrix:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> M = DomainMatrix([[ZZ(1), ZZ(0), ZZ(2)],
... [ZZ(0), ZZ(3), ZZ(0)],
... [ZZ(4), ZZ(6), ZZ(5)]], (3, 3), ZZ)
>>> M.scc()
[[1], [0, 2]]
Compute the determinant from the components:
>>> MM = M.to_Matrix()
>>> MM
Matrix([
[1, 0, 2],
[0, 3, 0],
[4, 6, 5]])
>>> MM[[1], [1]]
Matrix([[3]])
>>> MM[[0, 2], [0, 2]]
Matrix([
[1, 2],
[4, 5]])
>>> MM.det()
-9
>>> MM[[1], [1]].det() * MM[[0, 2], [0, 2]].det()
-9
The components are given in reverse topological order and represent a
permutation of the rows and columns that will bring the matrix into
block lower-triangular form:
>>> MM[[1, 0, 2], [1, 0, 2]]
Matrix([
[3, 0, 0],
[0, 1, 2],
[6, 4, 5]])
Returns
=======
List of lists of integers
Each list represents a strongly connected component.
See also
========
sympy.matrices.matrices.MatrixBase.strongly_connected_components
sympy.utilities.iterables.strongly_connected_components
"""
if not self.is_square:
raise DMNonSquareMatrixError('Matrix must be square for scc')
return self.rep.scc()
def clear_denoms(self, convert=False):
"""
Clear denominators, but keep the domain unchanged.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A = DM([[(1,2), (1,3)], [(1,4), (1,5)]], QQ)
>>> den, Anum = A.clear_denoms()
>>> den.to_sympy()
60
>>> Anum.to_Matrix()
Matrix([
[30, 20],
[15, 12]])
>>> den * A == Anum
True
The numerator matrix will be in the same domain as the original matrix
unless ``convert`` is set to ``True``:
>>> A.clear_denoms()[1].domain
QQ
>>> A.clear_denoms(convert=True)[1].domain
ZZ
The denominator is always in the associated ring:
>>> A.clear_denoms()[0].domain
ZZ
>>> A.domain.get_ring()
ZZ
See Also
========
sympy.polys.polytools.Poly.clear_denoms
"""
elems0, data = self.to_flat_nz()
K0 = self.domain
K1 = K0.get_ring() if K0.has_assoc_Ring else K0
den, elems1 = dup_clear_denoms(elems0, K0, K1, convert=convert)
if convert:
Kden, Knum = K1, K1
else:
Kden, Knum = K1, K0
den = DomainScalar(den, Kden)
num = self.from_flat_nz(elems1, data, Knum)
return den, num
def cancel_denom(self, denom):
"""
Cancel factors between a matrix and a denominator.
Returns a matrix and denominator on lowest terms.
Requires ``gcd`` in the ground domain.
Methods like :meth:`solve_den`, :meth:`inv_den` and :meth:`rref_den`
return a matrix and denominator but not necessarily on lowest terms.
Reduction to lowest terms without fractions can be performed with
:meth:`cancel_denom`.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[2, 2, 0],
... [0, 2, 2],
... [0, 0, 2]], ZZ)
>>> Minv, den = M.inv_den()
>>> Minv.to_Matrix()
Matrix([
[1, -1, 1],
[0, 1, -1],
[0, 0, 1]])
>>> den
2
>>> Minv_reduced, den_reduced = Minv.cancel_denom(den)
>>> Minv_reduced.to_Matrix()
Matrix([
[1, -1, 1],
[0, 1, -1],
[0, 0, 1]])
>>> den_reduced
2
>>> Minv_reduced.to_field() / den_reduced == Minv.to_field() / den
True
The denominator is made canonical with respect to units (e.g. a
negative denominator is made positive):
>>> M = DM([[2, 2, 0]], ZZ)
>>> den = ZZ(-4)
>>> M.cancel_denom(den)
(DomainMatrix([[-1, -1, 0]], (1, 3), ZZ), 2)
Any factor common to _all_ elements will be cancelled but there can
still be factors in common between _some_ elements of the matrix and
the denominator. To cancel factors between each element and the
denominator, use :meth:`cancel_denom_elementwise` or otherwise convert
to a field and use division:
>>> M = DM([[4, 6]], ZZ)
>>> den = ZZ(12)
>>> M.cancel_denom(den)
(DomainMatrix([[2, 3]], (1, 2), ZZ), 6)
>>> numers, denoms = M.cancel_denom_elementwise(den)
>>> numers
DomainMatrix([[1, 1]], (1, 2), ZZ)
>>> denoms
DomainMatrix([[3, 2]], (1, 2), ZZ)
>>> M.to_field() / den
DomainMatrix([[1/3, 1/2]], (1, 2), QQ)
See Also
========
solve_den
inv_den
rref_den
cancel_denom_elementwise
"""
M = self
K = self.domain
if K.is_zero(denom):
raise ZeroDivisionError('denominator is zero')
elif K.is_one(denom):
return (M.copy(), denom)
elements, data = M.to_flat_nz()
# First canonicalize the denominator (e.g. multiply by -1).
if K.is_negative(denom):
u = -K.one
else:
u = K.canonical_unit(denom)
# Often after e.g. solve_den the denominator will be much more
# complicated than the elements of the numerator. Hopefully it will be
# quicker to find the gcd of the numerator and if there is no content
# then we do not need to look at the denominator at all.
content = dup_content(elements, K)
common = K.gcd(content, denom)
if not K.is_one(common):
elements = dup_quo_ground(elements, common, K)
denom = K.quo(denom, common)
if not K.is_one(u):
elements = dup_mul_ground(elements, u, K)
denom = u * denom
elif K.is_one(common):
return (M.copy(), denom)
M_cancelled = M.from_flat_nz(elements, data, K)
return M_cancelled, denom
def cancel_denom_elementwise(self, denom):
"""
Cancel factors between the elements of a matrix and a denominator.
Returns a matrix of numerators and matrix of denominators.
Requires ``gcd`` in the ground domain.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[2, 3], [4, 12]], ZZ)
>>> denom = ZZ(6)
>>> numers, denoms = M.cancel_denom_elementwise(denom)
>>> numers.to_Matrix()
Matrix([
[1, 1],
[2, 2]])
>>> denoms.to_Matrix()
Matrix([
[3, 2],
[3, 1]])
>>> M_frac = (M.to_field() / denom).to_Matrix()
>>> M_frac
Matrix([
[1/3, 1/2],
[2/3, 2]])
>>> denoms_inverted = denoms.to_Matrix().applyfunc(lambda e: 1/e)
>>> numers.to_Matrix().multiply_elementwise(denoms_inverted) == M_frac
True
Use :meth:`cancel_denom` to cancel factors between the matrix and the
denominator while preserving the form of a matrix with a scalar
denominator.
See Also
========
cancel_denom
"""
K = self.domain
M = self
if K.is_zero(denom):
raise ZeroDivisionError('denominator is zero')
elif K.is_one(denom):
M_numers = M.copy()
M_denoms = M.ones(M.shape, M.domain)
return (M_numers, M_denoms)
elements, data = M.to_flat_nz()
cofactors = [K.cofactors(numer, denom) for numer in elements]
gcds, numers, denoms = zip(*cofactors)
M_numers = M.from_flat_nz(list(numers), data, K)
M_denoms = M.from_flat_nz(list(denoms), data, K)
return (M_numers, M_denoms)
def content(self):
"""
Return the gcd of the elements of the matrix.
Requires ``gcd`` in the ground domain.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[2, 4], [4, 12]], ZZ)
>>> M.content()
2
See Also
========
primitive
cancel_denom
"""
K = self.domain
elements, _ = self.to_flat_nz()
return dup_content(elements, K)
def primitive(self):
"""
Factor out gcd of the elements of a matrix.
Requires ``gcd`` in the ground domain.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[2, 4], [4, 12]], ZZ)
>>> content, M_primitive = M.primitive()
>>> content
2
>>> M_primitive
DomainMatrix([[1, 2], [2, 6]], (2, 2), ZZ)
>>> content * M_primitive == M
True
>>> M_primitive.content() == ZZ(1)
True
See Also
========
content
cancel_denom
"""
K = self.domain
elements, data = self.to_flat_nz()
content, prims = dup_primitive(elements, K)
M_primitive = self.from_flat_nz(prims, data, K)
return content, M_primitive
def rref(self, *, method='auto'):
r"""
Returns reduced-row echelon form (RREF) and list of pivots.
If the domain is not a field then it will be converted to a field. See
:meth:`rref_den` for the fraction-free version of this routine that
returns RREF with denominator instead.
The domain must either be a field or have an associated fraction field
(see :meth:`to_field`).
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(2), QQ(-1), QQ(0)],
... [QQ(-1), QQ(2), QQ(-1)],
... [QQ(0), QQ(0), QQ(2)]], (3, 3), QQ)
>>> rref_matrix, rref_pivots = A.rref()
>>> rref_matrix
DomainMatrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]], (3, 3), QQ)
>>> rref_pivots
(0, 1, 2)
Parameters
==========
method : str, optional (default: 'auto')
The method to use to compute the RREF. The default is ``'auto'``,
which will attempt to choose the fastest method. The other options
are:
- ``A.rref(method='GJ')`` uses Gauss-Jordan elimination with
division. If the domain is not a field then it will be converted
to a field with :meth:`to_field` first and RREF will be computed
by inverting the pivot elements in each row. This is most
efficient for very sparse matrices or for matrices whose elements
have complex denominators.
- ``A.rref(method='FF')`` uses fraction-free Gauss-Jordan
elimination. Elimination is performed using exact division
(``exquo``) to control the growth of the coefficients. In this
case the current domain is always used for elimination but if
the domain is not a field then it will be converted to a field
at the end and divided by the denominator. This is most efficient
for dense matrices or for matrices with simple denominators.
- ``A.rref(method='CD')`` clears the denominators before using
fraction-free Gauss-Jordan elimination in the associated ring.
This is most efficient for dense matrices with very simple
denominators.
- ``A.rref(method='GJ_dense')``, ``A.rref(method='FF_dense')``, and
``A.rref(method='CD_dense')`` are the same as the above methods
except that the dense implementations of the algorithms are used.
By default ``A.rref(method='auto')`` will usually choose the
sparse implementations for RREF.
Regardless of which algorithm is used the returned matrix will
always have the same format (sparse or dense) as the input and its
domain will always be the field of fractions of the input domain.
Returns
=======
(DomainMatrix, list)
reduced-row echelon form and list of pivots for the DomainMatrix
See Also
========
rref_den
RREF with denominator
sympy.polys.matrices.sdm.sdm_irref
Sparse implementation of ``method='GJ'``.
sympy.polys.matrices.sdm.sdm_rref_den
Sparse implementation of ``method='FF'`` and ``method='CD'``.
sympy.polys.matrices.dense.ddm_irref
Dense implementation of ``method='GJ'``.
sympy.polys.matrices.dense.ddm_irref_den
Dense implementation of ``method='FF'`` and ``method='CD'``.
clear_denoms
Clear denominators from a matrix, used by ``method='CD'`` and
by ``method='GJ'`` when the original domain is not a field.
"""
return _dm_rref(self, method=method)
def rref_den(self, *, method='auto', keep_domain=True):
r"""
Returns reduced-row echelon form with denominator and list of pivots.
Requires exact division in the ground domain (``exquo``).
Examples
========
>>> from sympy import ZZ, QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(2), ZZ(-1), ZZ(0)],
... [ZZ(-1), ZZ(2), ZZ(-1)],
... [ZZ(0), ZZ(0), ZZ(2)]], (3, 3), ZZ)
>>> A_rref, denom, pivots = A.rref_den()
>>> A_rref
DomainMatrix([[6, 0, 0], [0, 6, 0], [0, 0, 6]], (3, 3), ZZ)
>>> denom
6
>>> pivots
(0, 1, 2)
>>> A_rref.to_field() / denom
DomainMatrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]], (3, 3), QQ)
>>> A_rref.to_field() / denom == A.convert_to(QQ).rref()[0]
True
Parameters
==========
method : str, optional (default: 'auto')
The method to use to compute the RREF. The default is ``'auto'``,
which will attempt to choose the fastest method. The other options
are:
- ``A.rref(method='FF')`` uses fraction-free Gauss-Jordan
elimination. Elimination is performed using exact division
(``exquo``) to control the growth of the coefficients. In this
case the current domain is always used for elimination and the
result is always returned as a matrix over the current domain.
This is most efficient for dense matrices or for matrices with
simple denominators.
- ``A.rref(method='CD')`` clears denominators before using
fraction-free Gauss-Jordan elimination in the associated ring.
The result will be converted back to the original domain unless
``keep_domain=False`` is passed in which case the result will be
over the ring used for elimination. This is most efficient for
dense matrices with very simple denominators.
- ``A.rref(method='GJ')`` uses Gauss-Jordan elimination with
division. If the domain is not a field then it will be converted
to a field with :meth:`to_field` first and RREF will be computed
by inverting the pivot elements in each row. The result is
converted back to the original domain by clearing denominators
unless ``keep_domain=False`` is passed in which case the result
will be over the field used for elimination. This is most
efficient for very sparse matrices or for matrices whose elements
have complex denominators.
- ``A.rref(method='GJ_dense')``, ``A.rref(method='FF_dense')``, and
``A.rref(method='CD_dense')`` are the same as the above methods
except that the dense implementations of the algorithms are used.
By default ``A.rref(method='auto')`` will usually choose the
sparse implementations for RREF.
Regardless of which algorithm is used the returned matrix will
always have the same format (sparse or dense) as the input and if
``keep_domain=True`` its domain will always be the same as the
input.
keep_domain : bool, optional
If True (the default), the domain of the returned matrix and
denominator are the same as the domain of the input matrix. If
False, the domain of the returned matrix might be changed to an
associated ring or field if the algorithm used a different domain.
This is useful for efficiency if the caller does not need the
result to be in the original domain e.g. it avoids clearing
denominators in the case of ``A.rref(method='GJ')``.
Returns
=======
(DomainMatrix, scalar, list)
Reduced-row echelon form, denominator and list of pivot indices.
See Also
========
rref
RREF without denominator for field domains.
sympy.polys.matrices.sdm.sdm_irref
Sparse implementation of ``method='GJ'``.
sympy.polys.matrices.sdm.sdm_rref_den
Sparse implementation of ``method='FF'`` and ``method='CD'``.
sympy.polys.matrices.dense.ddm_irref
Dense implementation of ``method='GJ'``.
sympy.polys.matrices.dense.ddm_irref_den
Dense implementation of ``method='FF'`` and ``method='CD'``.
clear_denoms
Clear denominators from a matrix, used by ``method='CD'``.
"""
return _dm_rref_den(self, method=method, keep_domain=keep_domain)
def columnspace(self):
r"""
Returns the columnspace for the DomainMatrix
Returns
=======
DomainMatrix
The columns of this matrix form a basis for the columnspace.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(1), QQ(-1)],
... [QQ(2), QQ(-2)]], (2, 2), QQ)
>>> A.columnspace()
DomainMatrix([[1], [2]], (2, 1), QQ)
"""
if not self.domain.is_Field:
raise DMNotAField('Not a field')
rref, pivots = self.rref()
rows, cols = self.shape
return self.extract(range(rows), pivots)
def rowspace(self):
r"""
Returns the rowspace for the DomainMatrix
Returns
=======
DomainMatrix
The rows of this matrix form a basis for the rowspace.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(1), QQ(-1)],
... [QQ(2), QQ(-2)]], (2, 2), QQ)
>>> A.rowspace()
DomainMatrix([[1, -1]], (1, 2), QQ)
"""
if not self.domain.is_Field:
raise DMNotAField('Not a field')
rref, pivots = self.rref()
rows, cols = self.shape
return self.extract(range(len(pivots)), range(cols))
def nullspace(self, divide_last=False):
r"""
Returns the nullspace for the DomainMatrix
Returns
=======
DomainMatrix
The rows of this matrix form a basis for the nullspace.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A = DM([
... [QQ(2), QQ(-2)],
... [QQ(4), QQ(-4)]], QQ)
>>> A.nullspace()
DomainMatrix([[1, 1]], (1, 2), QQ)
The returned matrix is a basis for the nullspace:
>>> A_null = A.nullspace().transpose()
>>> A * A_null
DomainMatrix([[0], [0]], (2, 1), QQ)
>>> rows, cols = A.shape
>>> nullity = rows - A.rank()
>>> A_null.shape == (cols, nullity)
True
Nullspace can also be computed for non-field rings. If the ring is not
a field then division is not used. Setting ``divide_last`` to True will
raise an error in this case:
>>> from sympy import ZZ
>>> B = DM([[6, -3],
... [4, -2]], ZZ)
>>> B.nullspace()
DomainMatrix([[3, 6]], (1, 2), ZZ)
>>> B.nullspace(divide_last=True)
Traceback (most recent call last):
...
DMNotAField: Cannot normalize vectors over a non-field
Over a ring with ``gcd`` defined the nullspace can potentially be
reduced with :meth:`primitive`:
>>> B.nullspace().primitive()
(3, DomainMatrix([[1, 2]], (1, 2), ZZ))
A matrix over a ring can often be normalized by converting it to a
field but it is often a bad idea to do so:
>>> from sympy.abc import a, b, c
>>> from sympy import Matrix
>>> M = Matrix([[ a*b, b + c, c],
... [ a - b, b*c, c**2],
... [a*b + a - b, b*c + b + c, c**2 + c]])
>>> M.to_DM().domain
ZZ[a,b,c]
>>> M.to_DM().nullspace().to_Matrix().transpose()
Matrix([
[ c**3],
[ -a*b*c**2 + a*c - b*c],
[a*b**2*c - a*b - a*c + b**2 + b*c]])
The unnormalized form here is nicer than the normalized form that
spreads a large denominator throughout the matrix:
>>> M.to_DM().to_field().nullspace(divide_last=True).to_Matrix().transpose()
Matrix([
[ c**3/(a*b**2*c - a*b - a*c + b**2 + b*c)],
[(-a*b*c**2 + a*c - b*c)/(a*b**2*c - a*b - a*c + b**2 + b*c)],
[ 1]])
Parameters
==========
divide_last : bool, optional
If False (the default), the vectors are not normalized and the RREF
is computed using :meth:`rref_den` and the denominator is
discarded. If True, then each row is divided by its final element;
the domain must be a field in this case.
See Also
========
nullspace_from_rref
rref
rref_den
rowspace
"""
A = self
K = A.domain
if divide_last and not K.is_Field:
raise DMNotAField("Cannot normalize vectors over a non-field")
if divide_last:
A_rref, pivots = A.rref()
else:
A_rref, den, pivots = A.rref_den()
# Ensure that the sign is canonical before discarding the
# denominator. Then M.nullspace().primitive() is canonical.
u = K.canonical_unit(den)
if u != K.one:
A_rref *= u
A_null = A_rref.nullspace_from_rref(pivots)
return A_null
def nullspace_from_rref(self, pivots=None):
"""
Compute nullspace from rref and pivots.
The domain of the matrix can be any domain.
The matrix must be in reduced row echelon form already. Otherwise the
result will be incorrect. Use :meth:`rref` or :meth:`rref_den` first
to get the reduced row echelon form or use :meth:`nullspace` instead.
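Examples
========
A minimal sketch with a matrix that is already in RREF:
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A_rref = DM([[1, 0, -1], [0, 1, 2]], QQ)
>>> A_rref.nullspace_from_rref()
DomainMatrix([[1, -2, 1]], (1, 3), QQ)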
See Also
========
nullspace
rref
rref_den
sympy.polys.matrices.sdm.SDM.nullspace_from_rref
sympy.polys.matrices.ddm.DDM.nullspace_from_rref
"""
null_rep, nonpivots = self.rep.nullspace_from_rref(pivots)
return self.from_rep(null_rep)
def inv(self):
r"""
Finds the inverse of the DomainMatrix if exists
Returns
=======
DomainMatrix
DomainMatrix after inverse
Raises
======
DMNotAField
If the domain of the DomainMatrix is not a Field
DMNonSquareMatrixError
If the DomainMatrix is not square
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(2), QQ(-1), QQ(0)],
... [QQ(-1), QQ(2), QQ(-1)],
... [QQ(0), QQ(0), QQ(2)]], (3, 3), QQ)
>>> A.inv()
DomainMatrix([[2/3, 1/3, 1/6], [1/3, 2/3, 1/3], [0, 0, 1/2]], (3, 3), QQ)
See Also
========
neg
"""
if not self.domain.is_Field:
raise DMNotAField('Not a field')
m, n = self.shape
if m != n:
raise DMNonSquareMatrixError
inv = self.rep.inv()
return self.from_rep(inv)
def det(self):
r"""
Returns the determinant of a square :class:`DomainMatrix`.
Returns
=======
determinant: DomainElement
Determinant of the matrix.
Raises
======
DMNonSquareMatrixError
If the DomainMatrix is not square
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.det()
-2
"""
m, n = self.shape
if m != n:
raise DMNonSquareMatrixError
return self.rep.det()
def adj_det(self):
"""
Adjugate and determinant of a square :class:`DomainMatrix`.
Returns
=======
(adjugate, determinant) : (DomainMatrix, DomainScalar)
The adjugate matrix and determinant of this matrix.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], ZZ)
>>> adjA, detA = A.adj_det()
>>> adjA
DomainMatrix([[4, -2], [-3, 1]], (2, 2), ZZ)
>>> detA
-2
See Also
========
adjugate
Returns only the adjugate matrix.
det
Returns only the determinant.
inv_den
Returns a matrix/denominator pair representing the inverse matrix
but perhaps differing from the adjugate and determinant by a common
factor.
"""
m, n = self.shape
I_m = self.eye((m, m), self.domain)
adjA, detA = self.solve_den_charpoly(I_m, check=False)
if self.rep.fmt == "dense":
adjA = adjA.to_dense()
return adjA, detA
def adjugate(self):
"""
Adjugate of a square :class:`DomainMatrix`.
The adjugate matrix is the transpose of the cofactor matrix and is
related to the inverse by::
adj(A) = det(A) * A.inv()
Unlike the inverse matrix the adjugate matrix can be computed and
expressed without division or fractions in the ground domain.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> A.adjugate()
DomainMatrix([[4, -2], [-3, 1]], (2, 2), ZZ)
Returns
=======
DomainMatrix
The adjugate matrix of this matrix with the same domain.
See Also
========
adj_det
"""
adjA, detA = self.adj_det()
return adjA
def inv_den(self, method=None):
"""
Return the inverse as a :class:`DomainMatrix` with denominator.
Returns
=======
(inv, den) : (:class:`DomainMatrix`, :class:`~.DomainElement`)
The inverse matrix and its denominator.
This is more or less equivalent to :meth:`adj_det` except that ``inv``
and ``den`` are not guaranteed to be the adjugate and determinant. The
ratio ``inv/den`` is equivalent to ``adj/det`` but some factors
might be cancelled between ``inv`` and ``den``. In simple cases this
might just be a minus sign so that ``(inv, den) == (-adj, -det)`` but
factors more complicated than ``-1`` can also be cancelled.
Cancellation is not guaranteed to be complete so ``inv`` and ``den``
may not be on lowest terms. The denominator ``den`` will be zero if and
only if the determinant is zero.
If the actual adjugate and determinant are needed, use :meth:`adj_det`
instead. If the intention is to compute the inverse matrix or solve a
system of equations then :meth:`inv_den` is more efficient.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(2), ZZ(-1), ZZ(0)],
... [ZZ(-1), ZZ(2), ZZ(-1)],
... [ZZ(0), ZZ(0), ZZ(2)]], (3, 3), ZZ)
>>> Ainv, den = A.inv_den()
>>> den
6
>>> Ainv
DomainMatrix([[4, 2, 1], [2, 4, 2], [0, 0, 3]], (3, 3), ZZ)
>>> A * Ainv == den * A.eye(A.shape, A.domain).to_dense()
True
Parameters
==========
method : str, optional
The method to use to compute the inverse. Can be one of ``None``,
``'rref'`` or ``'charpoly'``. If ``None`` then the method is
chosen automatically (see :meth:`solve_den` for details).
See Also
========
inv
det
adj_det
solve_den
"""
I = self.eye(self.shape, self.domain)
return self.solve_den(I, method=method)
def solve_den(self, b, method=None):
"""
Solve matrix equation $Ax = b$ without fractions in the ground domain.
Examples
========
Solve a matrix equation over the integers:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> b = DM([[ZZ(5)], [ZZ(6)]], ZZ)
>>> xnum, xden = A.solve_den(b)
>>> xden
-2
>>> xnum
DomainMatrix([[8], [-9]], (2, 1), ZZ)
>>> A * xnum == xden * b
True
Solve a matrix equation over a polynomial ring:
>>> from sympy import ZZ
>>> from sympy.abc import x, y, z, a, b
>>> R = ZZ[x, y, z, a, b]
>>> M = DM([[x*y, x*z], [y*z, x*z]], R)
>>> b = DM([[a], [b]], R)
>>> M.to_Matrix()
Matrix([
[x*y, x*z],
[y*z, x*z]])
>>> b.to_Matrix()
Matrix([
[a],
[b]])
>>> xnum, xden = M.solve_den(b)
>>> xden
x**2*y*z - x*y*z**2
>>> xnum.to_Matrix()
Matrix([
[ a*x*z - b*x*z],
[-a*y*z + b*x*y]])
>>> M * xnum == xden * b
True
The solution can be expressed over a fraction field which will cancel
gcds between the denominator and the elements of the numerator:
>>> xsol = xnum.to_field() / xden
>>> xsol.to_Matrix()
Matrix([
[ (a - b)/(x*y - y*z)],
[(-a*z + b*x)/(x**2*z - x*z**2)]])
>>> (M * xsol).to_Matrix() == b.to_Matrix()
True
When solving a large system of equations this cancellation step might
be a lot slower than :func:`solve_den` itself. The solution can also be
expressed as a ``Matrix`` without attempting any polynomial
cancellation between the numerator and denominator giving a less
simplified result more quickly:
>>> xsol_uncancelled = xnum.to_Matrix() / xnum.domain.to_sympy(xden)
>>> xsol_uncancelled
Matrix([
[ (a*x*z - b*x*z)/(x**2*y*z - x*y*z**2)],
[(-a*y*z + b*x*y)/(x**2*y*z - x*y*z**2)]])
>>> from sympy import cancel
>>> cancel(xsol_uncancelled) == xsol.to_Matrix()
True
Parameters
==========
self : :class:`DomainMatrix`
The ``m x n`` matrix $A$ in the equation $Ax = b$. Underdetermined
systems are not supported so ``m >= n``: $A$ should be square or
have more rows than columns.
b : :class:`DomainMatrix`
The ``n x m`` matrix $b$ for the rhs.
method: str, optional
The method to use for solving the system. Can be one of ``None``,
``'charpoly'`` or ``'rref'``. If ``None`` (the default) then the
method will be chosen automatically.
The ``charpoly`` method uses :meth:`solve_den_charpoly` and can
only be used if the matrix is square. This method is division free
and can be used with any domain.
The ``rref`` method is fraction free but requires exact division
in the ground domain (``exquo``). This is also suitable for most
domains. This method can be used with overdetermined systems (more
equations than unknowns) but not underdetermined systems as a
unique solution is sought.
Returns
=======
(xnum, xden) : (DomainMatrix, DomainElement)
The solution of the equation $Ax = b$ as a pair consisting of an
``n x m`` matrix numerator ``xnum`` and a scalar denominator
``xden``.
The solution $x$ is given by ``x = xnum / xden``. The division free
invariant is ``A * xnum == xden * b``. If $A$ is square then the
denominator ``xden`` will be a divisor of the determinant $det(A)$.
Raises
======
DMNonInvertibleMatrixError
If the system $Ax = b$ does not have a unique solution.
See Also
========
solve_den_charpoly
solve_den_rref
inv_den
"""
m, n = self.shape
bm, bn = b.shape
if m != bm:
raise DMShapeError("Matrix equation shape mismatch.")
if method is None:
method = 'rref'
elif method == 'charpoly' and m != n:
raise DMNonSquareMatrixError("method='charpoly' requires a square matrix.")
if method == 'charpoly':
xnum, xden = self.solve_den_charpoly(b)
elif method == 'rref':
xnum, xden = self.solve_den_rref(b)
else:
raise DMBadInputError("method should be 'rref' or 'charpoly'")
return xnum, xden
def solve_den_rref(self, b):
"""
Solve matrix equation $Ax = b$ using fraction-free RREF
Solves the matrix equation $Ax = b$ for $x$ and returns the solution
as a numerator/denominator pair.
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> b = DM([[ZZ(5)], [ZZ(6)]], ZZ)
>>> xnum, xden = A.solve_den_rref(b)
>>> xden
-2
>>> xnum
DomainMatrix([[8], [-9]], (2, 1), ZZ)
>>> A * xnum == xden * b
True
See Also
========
solve_den
solve_den_charpoly
"""
A = self
m, n = A.shape
bm, bn = b.shape
if m != bm:
raise DMShapeError("Matrix equation shape mismatch.")
if m < n:
raise DMShapeError("Underdetermined matrix equation.")
Aaug = A.hstack(b)
Aaug_rref, denom, pivots = Aaug.rref_den()
# XXX: We check here if there are pivots after the last column. If
# there were, then it possibly means that rref_den performed some
# unnecessary elimination. It would be better if rref methods had a
# parameter indicating how many columns should be used for elimination.
if len(pivots) != n or pivots and pivots[-1] >= n:
raise DMNonInvertibleMatrixError("Non-unique solution.")
xnum = Aaug_rref[:n, n:]
xden = denom
return xnum, xden
def solve_den_charpoly(self, b, cp=None, check=True):
"""
Solve matrix equation $Ax = b$ using the characteristic polynomial.
This method solves the square matrix equation $Ax = b$ for $x$ using
the characteristic polynomial without any division or fractions in the
ground domain.
Examples
========
Solve a matrix equation over the integers:
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DM
>>> A = DM([[ZZ(1), ZZ(2)], [ZZ(3), ZZ(4)]], ZZ)
>>> b = DM([[ZZ(5)], [ZZ(6)]], ZZ)
>>> xnum, detA = A.solve_den_charpoly(b)
>>> detA
-2
>>> xnum
DomainMatrix([[8], [-9]], (2, 1), ZZ)
>>> A * xnum == detA * b
True
Parameters
==========
self : DomainMatrix
The ``n x n`` matrix `A` in the equation `Ax = b`. Must be square
and invertible.
b : DomainMatrix
The ``n x m`` matrix `b` for the rhs.
cp : list, optional
The characteristic polynomial of the matrix `A` if known. If not
given, it will be computed using :meth:`charpoly`.
check : bool, optional
If ``True`` (the default) check that the determinant is not zero
and raise an error if it is. If ``False`` then if the determinant
is zero the return value will be equal to ``(A.adjugate()*b, 0)``.
Returns
=======
(xnum, detA) : (DomainMatrix, DomainElement)
The solution of the equation `Ax = b` as a matrix numerator and
scalar denominator pair. The denominator is equal to the
determinant of `A` and the numerator is ``adj(A)*b``.
The solution $x$ is given by ``x = xnum / detA``. The division free
invariant is ``A * xnum == detA * b``.
If ``b`` is the identity matrix, then ``xnum`` is the adjugate matrix
and we have ``A * adj(A) == detA * I``.
See Also
========
solve_den
Main frontend for solving matrix equations with denominator.
solve_den_rref
Solve matrix equations using fraction-free RREF.
inv_den
Invert a matrix using the characteristic polynomial.
"""
A, b = self.unify(b)
m, n = self.shape
mb, nb = b.shape
if m != n:
raise DMNonSquareMatrixError("Matrix must be square")
if mb != m:
raise DMShapeError("Matrix and vector must have the same number of rows")
f, detA = self.adj_poly_det(cp=cp)
if check and not detA:
raise DMNonInvertibleMatrixError("Matrix is not invertible")
# Compute adj(A)*b = det(A)*inv(A)*b using Horner's method without
# constructing inv(A) explicitly.
adjA_b = self.eval_poly_mul(f, b)
return (adjA_b, detA)
def adj_poly_det(self, cp=None):
"""
Return the polynomial $p$ such that $p(A) = adj(A)$ and also the
determinant of $A$.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A = DM([[QQ(1), QQ(2)], [QQ(3), QQ(4)]], QQ)
>>> p, detA = A.adj_poly_det()
>>> p
[-1, 5]
>>> p_A = A.eval_poly(p)
>>> p_A
DomainMatrix([[4, -2], [-3, 1]], (2, 2), QQ)
>>> p[0]*A**1 + p[1]*A**0 == p_A
True
>>> p_A == A.adjugate()
True
>>> A * A.adjugate() == detA * A.eye(A.shape, A.domain).to_dense()
True
See Also
========
adjugate
eval_poly
adj_det
"""
# Cayley-Hamilton says that a matrix satisfies its own minimal
# polynomial
#
# p[0]*A^n + p[1]*A^(n-1) + ... + p[n]*I = 0
#
# with p[0]=1 and p[n]=(-1)^n*det(A) or
#
# det(A)*I = -(-1)^n*(p[0]*A^(n-1) + p[1]*A^(n-2) + ... + p[n-1]*A).
#
# Define a new polynomial f with f[i] = -(-1)^n*p[i] for i=0..n-1. Then
#
# det(A)*I = f[0]*A^n + f[1]*A^(n-1) + ... + f[n-1]*A.
#
# Multiplying on the right by inv(A) gives
#
# det(A)*inv(A) = f[0]*A^(n-1) + f[1]*A^(n-2) + ... + f[n-1].
#
# So adj(A) = det(A)*inv(A) = f(A)
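#
# As a concrete check (matching the docstring example above): for
# A = [[1, 2], [3, 4]] the charpoly is cp = [1, -5, -2] with n = 2 even,
# so detA = cp[-1] = -2, f = [-1, 5] and
# adj(A) = f(A) = -A + 5*I = [[4, -2], [-3, 1]].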
A = self
m, n = self.shape
if m != n:
raise DMNonSquareMatrixError("Matrix must be square")
if cp is None:
cp = A.charpoly()
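# charpoly returns the n + 1 coefficients of an n x n matrix, so an odd
# len(cp) corresponds to even n and an even len(cp) to odd n.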
if len(cp) % 2:
# n is even
detA = cp[-1]
f = [-cpi for cpi in cp[:-1]]
else:
# n is odd
detA = -cp[-1]
f = cp[:-1]
return f, detA
def eval_poly(self, p):
"""
Evaluate polynomial function of a matrix $p(A)$.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A = DM([[QQ(1), QQ(2)], [QQ(3), QQ(4)]], QQ)
>>> p = [QQ(1), QQ(2), QQ(3)]
>>> p_A = A.eval_poly(p)
>>> p_A
DomainMatrix([[12, 14], [21, 33]], (2, 2), QQ)
>>> p_A == p[0]*A**2 + p[1]*A + p[2]*A**0
True
See Also
========
eval_poly_mul
"""
A = self
m, n = A.shape
if m != n:
raise DMNonSquareMatrixError("Matrix must be square")
if not p:
return self.zeros(self.shape, self.domain)
elif len(p) == 1:
return p[0] * self.eye(self.shape, self.domain)
# Evaluate p(A) using Horner's method:
# XXX: Use Paterson-Stockmeyer method?
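# For p = [p0, p1, p2] the loop computes A*(A*(p0*I) + p1*I) + p2*I,
# which expands to p0*A**2 + p1*A + p2*I.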
I = A.eye(A.shape, A.domain)
p_A = p[0] * I
for pi in p[1:]:
p_A = A*p_A + pi*I
return p_A
def eval_poly_mul(self, p, B):
r"""
Evaluate polynomial matrix product $p(A) \times B$.
Evaluate the polynomial matrix product $p(A) \times B$ using Horner's
method without creating the matrix $p(A)$ explicitly. If $B$ is a
column matrix then this method will only use matrix-vector multiplies
and no matrix-matrix multiplies are needed.
If $B$ is square or wide or if $A$ can be represented in a simpler
domain than $B$ then it might be faster to evaluate $p(A)$ explicitly
(see :func:`eval_poly`) and then multiply with $B$.
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DM
>>> A = DM([[QQ(1), QQ(2)], [QQ(3), QQ(4)]], QQ)
>>> b = DM([[QQ(5)], [QQ(6)]], QQ)
>>> p = [QQ(1), QQ(2), QQ(3)]
>>> p_A_b = A.eval_poly_mul(p, b)
>>> p_A_b
DomainMatrix([[144], [303]], (2, 1), QQ)
>>> p_A_b == p[0]*A**2*b + p[1]*A*b + p[2]*b
True
>>> A.eval_poly_mul(p, b) == A.eval_poly(p)*b
True
See Also
========
eval_poly
solve_den_charpoly
"""
A = self
m, n = A.shape
mb, nb = B.shape
if m != n:
raise DMNonSquareMatrixError("Matrix must be square")
if mb != n:
raise DMShapeError("Matrices are not aligned")
if A.domain != B.domain:
raise DMDomainError("Matrices must have the same domain")
# Given a polynomial p(x) = p[0]*x^n + p[1]*x^(n-1) + ... + p[n-1]
# and matrices A and B we want to find
#
# p(A)*B = p[0]*A^n*B + p[1]*A^(n-1)*B + ... + p[n-1]*B
#
# Factoring out A term by term we get
#
# p(A)*B = A*(...A*(A*(A*(p[0]*B) + p[1]*B) + p[2]*B) + ...) + p[n-1]*B
#
# where each pair of brackets represents one iteration of the loop
# below starting from the innermost p[0]*B. If B is a column matrix
# then products like A*(...) are matrix-vector multiplies and products
# like p[i]*B are scalar-vector multiplies so there are no
# matrix-matrix multiplies.
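#
# For example, with p = [p0, p1, p2]:
#
#   p(A)*B = A*(A*(p0*B) + p1*B) + p2*B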
if not p:
return B.zeros(B.shape, B.domain, fmt=B.rep.fmt)
p_A_B = p[0]*B
for p_i in p[1:]:
p_A_B = A*p_A_B + p_i*B
return p_A_B
def lu(self):
r"""
Returns Lower and Upper decomposition of the DomainMatrix
Returns
=======
(L, U, exchange)
L, U are Lower and Upper decomposition of the DomainMatrix,
exchange is the list of indices of rows exchanged in the
decomposition.
Raises
======
ValueError
If the domain of the DomainMatrix is not a Field
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(1), QQ(-1)],
... [QQ(2), QQ(-2)]], (2, 2), QQ)
>>> L, U, exchange = A.lu()
>>> L
DomainMatrix([[1, 0], [2, 1]], (2, 2), QQ)
>>> U
DomainMatrix([[1, -1], [0, 0]], (2, 2), QQ)
>>> exchange
[]
See Also
========
lu_solve
"""
if not self.domain.is_Field:
raise DMNotAField('Not a field')
L, U, swaps = self.rep.lu()
return self.from_rep(L), self.from_rep(U), swaps
def lu_solve(self, rhs):
r"""
Solver for the DomainMatrix x in A*x = B
Parameters
==========
rhs : DomainMatrix B
Returns
=======
DomainMatrix
x in A*x = B
Raises
======
DMShapeError
If the DomainMatrix A and rhs have a different number of rows
ValueError
If the domain of DomainMatrix A is not a Field
Examples
========
>>> from sympy import QQ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [QQ(1), QQ(2)],
... [QQ(3), QQ(4)]], (2, 2), QQ)
>>> B = DomainMatrix([
... [QQ(1), QQ(1)],
... [QQ(0), QQ(1)]], (2, 2), QQ)
>>> A.lu_solve(B)
DomainMatrix([[-2, -1], [3/2, 1]], (2, 2), QQ)
See Also
========
lu
"""
if self.shape[0] != rhs.shape[0]:
raise DMShapeError("Shape")
if not self.domain.is_Field:
raise DMNotAField('Not a field')
sol = self.rep.lu_solve(rhs.rep)
return self.from_rep(sol)
def _solve(A, b):
# XXX: Not sure about this method or its signature. It is just created
# because it is needed by the holonomic module.
if A.shape[0] != b.shape[0]:
raise DMShapeError("Shape")
if A.domain != b.domain or not A.domain.is_Field:
raise DMNotAField('Not a field')
Aaug = A.hstack(b)
Arref, pivots = Aaug.rref()
particular = Arref.from_rep(Arref.rep.particular())
nullspace_rep, nonpivots = Arref[:,:-1].rep.nullspace()
nullspace = Arref.from_rep(nullspace_rep)
return particular, nullspace
def charpoly(self):
r"""
Characteristic polynomial of a square matrix.
Computes the characteristic polynomial in a fully expanded form using
division free arithmetic. If a factorization of the characteristic
polynomial is needed then it is more efficient to call
:meth:`charpoly_factor_list` than calling :meth:`charpoly` and then
factorizing the result.
Returns
=======
list: list of DomainElement
coefficients of the characteristic polynomial
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> A.charpoly()
[1, -5, -2]
See Also
========
charpoly_factor_list
Compute the factorisation of the characteristic polynomial.
charpoly_factor_blocks
A partial factorisation of the characteristic polynomial that can
be computed more efficiently than either the full factorisation or
the fully expanded polynomial.
"""
M = self
K = M.domain
factors = M.charpoly_factor_blocks()
cp = [K.one]
for f, mult in factors:
for _ in range(mult):
cp = dup_mul(cp, f, K)
return cp
def charpoly_factor_list(self):
"""
Full factorization of the characteristic polynomial.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[6, -1, 0, 0],
... [9, 12, 0, 0],
... [0, 0, 1, 2],
... [0, 0, 5, 6]], ZZ)
Compute the factorization of the characteristic polynomial:
>>> M.charpoly_factor_list()
[([1, -9], 2), ([1, -7, -4], 1)]
Use :meth:`charpoly` to get the unfactorized characteristic polynomial:
>>> M.charpoly()
[1, -25, 203, -495, -324]
The same calculations with ``Matrix``:
>>> M.to_Matrix().charpoly().as_expr()
lambda**4 - 25*lambda**3 + 203*lambda**2 - 495*lambda - 324
>>> M.to_Matrix().charpoly().as_expr().factor()
(lambda - 9)**2*(lambda**2 - 7*lambda - 4)
Returns
=======
list: list of pairs (factor, multiplicity)
A full factorization of the characteristic polynomial.
See Also
========
charpoly
Expanded form of the characteristic polynomial.
charpoly_factor_blocks
A partial factorisation of the characteristic polynomial that can
be computed more efficiently.
"""
M = self
K = M.domain
# It is more efficient to start from the partial factorization provided
# for free by M.charpoly_factor_blocks than the expanded M.charpoly.
factors = M.charpoly_factor_blocks()
factors_irreducible = []
for factor_i, mult_i in factors:
_, factors_list = dup_factor_list(factor_i, K)
for factor_j, mult_j in factors_list:
factors_irreducible.append((factor_j, mult_i * mult_j))
return _collect_factors(factors_irreducible)
def charpoly_factor_blocks(self):
"""
Partial factorisation of the characteristic polynomial.
This factorisation arises from a block structure of the matrix (if any)
and so the factors are not guaranteed to be irreducible. The
:meth:`charpoly_factor_blocks` method is the most efficient way to get
a representation of the characteristic polynomial but the result is
neither fully expanded nor fully factored.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import ZZ
>>> M = DM([[6, -1, 0, 0],
... [9, 12, 0, 0],
... [0, 0, 1, 2],
... [0, 0, 5, 6]], ZZ)
This computes a partial factorization using only the block structure of
the matrix to reveal factors:
>>> M.charpoly_factor_blocks()
[([1, -18, 81], 1), ([1, -7, -4], 1)]
These factors correspond to the two diagonal blocks in the matrix:
>>> DM([[6, -1], [9, 12]], ZZ).charpoly()
[1, -18, 81]
>>> DM([[1, 2], [5, 6]], ZZ).charpoly()
[1, -7, -4]
Use :meth:`charpoly_factor_list` to get a complete factorization into
irreducibles:
>>> M.charpoly_factor_list()
[([1, -9], 2), ([1, -7, -4], 1)]
Use :meth:`charpoly` to get the expanded characteristic polynomial:
>>> M.charpoly()
[1, -25, 203, -495, -324]
Returns
=======
list: list of pairs (factor, multiplicity)
A partial factorization of the characteristic polynomial.
See Also
========
charpoly
Compute the fully expanded characteristic polynomial.
charpoly_factor_list
Compute a full factorization of the characteristic polynomial.
"""
M = self
if not M.is_square:
raise DMNonSquareMatrixError("not square")
# scc returns indices that permute the matrix into block triangular
# form and can extract the diagonal blocks. M.charpoly() is equal to
# the product of the diagonal block charpolys.
components = M.scc()
block_factors = []
for indices in components:
block = M.extract(indices, indices)
block_factors.append((block.charpoly_base(), 1))
return _collect_factors(block_factors)
def charpoly_base(self):
"""
Base case for :meth:`charpoly_factor_blocks` after block decomposition.
This method is used internally by :meth:`charpoly_factor_blocks` as the
base case for computing the characteristic polynomial of a block. It is
more efficient to call :meth:`charpoly_factor_blocks`, :meth:`charpoly`
or :meth:`charpoly_factor_list` rather than call this method directly.
This will use either the dense or the sparse implementation depending
on the sparsity of the matrix and will clear denominators if possible
before calling :meth:`charpoly_berk` to compute the characteristic
polynomial using the Berkowitz algorithm.
See Also
========
charpoly
charpoly_factor_list
charpoly_factor_blocks
charpoly_berk
"""
M = self
K = M.domain
# It seems that the sparse implementation is always faster for random
# matrices with fewer than 50% non-zero entries. This does not seem to
# depend on domain, size, bit count etc.
density = self.nnz() / self.shape[0]**2
if density < 0.5:
M = M.to_sparse()
else:
M = M.to_dense()
# Clearing denominators is always more efficient if it can be done.
# Doing it here after block decomposition is good because each block
# might have a smaller denominator. However it might be better for
# charpoly and charpoly_factor_list to restore the denominators only at
# the very end so that they can call e.g. dup_factor_list before
# restoring the denominators. The methods would need to be changed to
# return (poly, denom) pairs to make that work though.
clear_denoms = K.is_Field and K.has_assoc_Ring
if clear_denoms:
d, M = M.clear_denoms(convert=True)
d = d.element
K_f = K
K_r = M.domain
# Berkowitz algorithm over K_r.
cp = M.charpoly_berk()
if clear_denoms:
# Restore the denominator in the charpoly over K_f.
#
# If M = N/d then p_M(x) = p_N(x*d)/d^n.
cp = dup_convert(cp, K_r, K_f)
p = [K_f.one, K_f.zero]
q = [K_f.one/d]
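# dup_transform evaluates q**n * cp(p/q); with p = x and q = 1/d this
# yields cp(d*x)/d**n as required by the identity above.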
cp = dup_transform(cp, p, q, K_f)
return cp
def charpoly_berk(self):
"""Compute the characteristic polynomial using the Berkowitz algorithm.
This method directly calls the underlying implementation of the
Berkowitz algorithm (:meth:`sympy.polys.matrices.dense.ddm_berk` or
:meth:`sympy.polys.matrices.sdm.sdm_berk`).
This is used by :meth:`charpoly` and other methods as the base case for
computing the characteristic polynomial. However those methods will
apply other optimizations such as block decomposition, clearing
denominators and converting between dense and sparse representations
before calling this method. It is more efficient to call those methods
instead of this one but this method is provided for direct access to
the Berkowitz algorithm.
Examples
========
>>> from sympy.polys.matrices import DM
>>> from sympy import QQ
>>> M = DM([[6, -1, 0, 0],
... [9, 12, 0, 0],
... [0, 0, 1, 2],
... [0, 0, 5, 6]], QQ)
>>> M.charpoly_berk()
[1, -25, 203, -495, -324]
See Also
========
charpoly
charpoly_base
charpoly_factor_list
charpoly_factor_blocks
sympy.polys.matrices.dense.ddm_berk
sympy.polys.matrices.sdm.sdm_berk
"""
return self.rep.charpoly()
@classmethod
def eye(cls, shape, domain):
r"""
Return identity matrix of size n or shape (m, n).
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> DomainMatrix.eye(3, QQ)
DomainMatrix({0: {0: 1}, 1: {1: 1}, 2: {2: 1}}, (3, 3), QQ)
"""
if isinstance(shape, int):
shape = (shape, shape)
return cls.from_rep(SDM.eye(shape, domain))
@classmethod
def diag(cls, diagonal, domain, shape=None):
r"""
Return diagonal matrix with entries from ``diagonal``.
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import ZZ
>>> DomainMatrix.diag([ZZ(5), ZZ(6)], ZZ)
DomainMatrix({0: {0: 5}, 1: {1: 6}}, (2, 2), ZZ)
"""
if shape is None:
N = len(diagonal)
shape = (N, N)
return cls.from_rep(SDM.diag(diagonal, domain, shape))
@classmethod
def zeros(cls, shape, domain, *, fmt='sparse'):
"""Returns a zero DomainMatrix of size shape, belonging to the specified domain
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> DomainMatrix.zeros((2, 3), QQ)
DomainMatrix({}, (2, 3), QQ)
"""
return cls.from_rep(SDM.zeros(shape, domain))
@classmethod
def ones(cls, shape, domain):
"""Returns a DomainMatrix of 1s, of size shape, belonging to the specified domain
Examples
========
>>> from sympy.polys.matrices import DomainMatrix
>>> from sympy import QQ
>>> DomainMatrix.ones((2,3), QQ)
DomainMatrix([[1, 1, 1], [1, 1, 1]], (2, 3), QQ)
"""
return cls.from_rep(DDM.ones(shape, domain).to_dfm_or_ddm())
| 1 |
07488abfd841f0c5c54a32e5229e929f7569cca6
|
Python
|
def __eq__(A, B):
r"""
Checks whether two DomainMatrix matrices are equal
Parameters
==========
A, B: DomainMatrix
to check equality
Returns
=======
Boolean
True for equal, else False
Raises
======
NotImplementedError
If B is not a DomainMatrix
Examples
========
>>> from sympy import ZZ
>>> from sympy.polys.matrices import DomainMatrix
>>> A = DomainMatrix([
... [ZZ(1), ZZ(2)],
... [ZZ(3), ZZ(4)]], (2, 2), ZZ)
>>> B = DomainMatrix([
... [ZZ(1), ZZ(1)],
... [ZZ(0), ZZ(1)]], (2, 2), ZZ)
>>> A.__eq__(A)
True
>>> A.__eq__(B)
False
"""
if not isinstance(A, type(B)):
return NotImplemented
return A.domain == B.domain and A.rep == B.rep
def unify_eq(A, B):
if A.shape != B.shape:
return False
if A.domain != B.domain:
A, B = A.unify(B)
return A == B
def lll(A, delta=QQ(3, 4)):
"""
Performs the Lenstra–Lenstra–Lovász (LLL) basis reduction algorithm.
See [1]_ and [2]_.
Parameters
==========
delta : QQ, optional
The Lovász parameter. Must be in the interval (0.25, 1), with larger
values producing a more reduced basis. The default is 0.75 for
historical reasons.
Returns
=======
The reduced basis as a DomainMatrix over ZZ.
Raises
======
DMValueError: if delta is not in the range (0.25, 1)
DMShapeError: if the matrix is not of shape (m, n) with m <= n
DMDomainError: if the matrix domain is not ZZ
DMRankError: if the matrix contains linearly dependent rows
Examples
========
>>> from sympy.polys.domains import ZZ, QQ
>>> from sympy.polys.matrices import DM
>>> x = DM([[1, 0, 0, 0, -20160],
... [0, 1, 0, 0, 33768],
... [0, 0, 1, 0, 39578],
... [0, 0, 0, 1, 47757]], ZZ)
>>> y = DM([[10, -3, -2, 8, -4],
... [3, -9, 8, 1, -11],
... [-3, 13, -9, -3, -9],
... [-12, -7, -11, 9, -1]], ZZ)
>>> assert x.lll(delta=QQ(5, 6)) == y
Notes
=====
The implementation is derived from the Maple code given in Figures 4.3
and 4.4 of [3]_ (pp.68-69). It uses the efficient method of only calculating
state updates as they are required.
See also
========
lll_transform
References
==========
.. [1] https://en.wikipedia.org/wiki/Lenstra%E2%80%93Lenstra%E2%80%93Lov%C3%A1sz_lattice_basis_reduction_algorithm
.. [2] https://web.archive.org/web/20221029115428/https://web.cs.elte.hu/~lovasz/scans/lll.pdf
.. [3] Murray R. Bremner, "Lattice Basis Reduction: An Introduction to the LLL Algorithm and Its Applications"
"""
return DomainMatrix.from_rep(A.rep.lll(delta=delta))
def lll_transform(A, delta=QQ(3, 4)):
"""
Performs the Lenstra–Lenstra–Lovász (LLL) basis reduction algorithm
and returns the reduced basis and transformation matrix.
Explanation
===========
Parameters, algorithm and basis are the same as for :meth:`lll` except that
the return value is a tuple `(B, T)` with `B` the reduced basis and
`T` a transformation matrix. The original basis `A` is transformed to
`B` with `T*A == B`. If only `B` is needed then :meth:`lll` should be
used as it is a little faster.
Examples
========
>>> from sympy.polys.domains import ZZ, QQ
>>> from sympy.polys.matrices import DM
>>> X = DM([[1, 0, 0, 0, -20160],
... [0, 1, 0, 0, 33768],
... [0, 0, 1, 0, 39578],
... [0, 0, 0, 1, 47757]], ZZ)
>>> B, T = X.lll_transform(delta=QQ(5, 6))
>>> T * X == B
True
See also
========
lll
"""
reduced, transform = A.rep.lll_transform(delta=delta)
return DomainMatrix.from_rep(reduced), DomainMatrix.from_rep(transform)
def _collect_factors(factors_list):
"""
Collect repeating factors and sort.
>>> from sympy.polys.matrices.domainmatrix import _collect_factors
>>> _collect_factors([([1, 2], 2), ([1, 4], 3), ([1, 2], 5)])
[([1, 4], 3), ([1, 2], 7)]
"""
factors = Counter()
for factor, exponent in factors_list:
factors[tuple(factor)] += exponent
factors_list = [(list(f), e) for f, e in factors.items()]
return _sort_factors(factors_list)
| 2 |
118a4483ca1cf645d66de85cad611d4b936981a8
|
Python
|
# PLGP SOFTWARE TOOL
# Furtkevicova Ludmila, part of a diploma thesis
# script: window with five tabs, functions, buttons, models
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import os
import tkMessageBox
import Tkinter
import ttk
from Tkinter import *
from ttk import *
import sys
import subprocess
import ScrolledText
import tkFileDialog
from tkFileDialog import askdirectory, asksaveasfile
# class related to the GRASS GIS program
class GRASS:
def __init__(self):
# launch GRASS GIS
grass7bin_win = r'C:\Program Files (x86)\GRASS GIS 7.0.0\grass70.bat'
# define the GRASS DATABASE (GRASS GIS database) directory
# path cestaL from the GUI
self.gisdb = "C:\\DP_LF"
# SOFTWARE
grass7bin = grass7bin_win
# GRASS 7 and GISBASE
startcmd = [grass7bin, '--config', 'path']
p = subprocess.Popen(startcmd, shell=False,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = p.communicate()
if p.returncode != 0:
print >>sys.stderr, "ERROR: Cannot find GRASS GIS 7 start script (%s)" % startcmd
sys.exit(-1)
self.gisbase = out.strip('\n\r')
# GISBASE and PATH variables
os.environ['GISBASE'] = self.gisbase
os.environ['PATH'] += os.pathsep + os.path.join(self.gisbase, 'extrabin')
# define the GRASS-Python environment
gpydir = os.path.join(self.gisbase, "etc", "python")
sys.path.append(gpydir)
os.environ['GISDBASE'] = self.gisdb
# class handling redirection (window content into a specific file)
class Presmerovanie(object):
def __init__(self, text_ctrl):
self.output = text_ctrl
def write(self, string):
self.output.insert(Tkinter.END, string)
# class for the user interface
class GUI(Tkinter.Frame):
Gobj = GRASS()
cestaV = ""
cestaI = ""
cestaL = ""
recl1 = "recl1"
cesta = "C:\\DP_LF\\vypocet\\"
# GUI
def __init__(self,gui):
Tkinter.Frame.__init__(self, gui)
self.gui = gui
self.gui.title(u"PLGP (Ludmila Furtkevicova, 2015) ")
note = Notebook(self.gui)
# five tabs
tab1 = Tkinter.Frame(note)
tab2 = Tkinter.Frame(note)
tab3 = Tkinter.Frame(note)
tab4 = Tkinter.Frame(note)
tab5 = Tkinter.Frame(note)
# style settings for the tabs
ttk.Style().configure('TLabelframe.Label', foreground='forest green',font="Verdana 8 bold")
ttk.Style().configure('TButton', foreground='cadet blue',font="Helvetica 8 bold")
ttk.Style().configure("TNotebook.Tab", foreground="dim gray",font="Helvetica 8 bold")
# set the tab labels
note.add(tab1, text = " 1. Settings ")
note.add(tab2, text = " 2. Parametric maps ")
note.add(tab3, text = " 3. Weight calculation ")
note.add(tab4, text = " 4. Prediction ")
note.add(tab5, text = " 5. Validation ")
note.pack()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ FIRST TAB ~~~~~~~~~~~~~~~~~~~~~~~~~
# set the paths and create the files needed by the following tabs
ttk.Label(tab1, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab1,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab1, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab1,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
# first sub-frame within the first tab (input data)
self.one = ttk.Labelframe(tab1, text = " 1. Input data: ")
self.one.grid(row=1, column=0, columnspan=2, sticky='S', padx=5, pady=5, ipadx=4,\
ipady=1)
L21 = ttk.Label(self.one, text=" Path to folder with vector data: ")
L21.grid(row=2, column=0, sticky='W', pady=5)
self.E21 = ttk.Entry(self.one, width=40)
self.E21.grid(row=2, column=1, columnspan=2, sticky="WE", pady=5, padx = 5)
B21 = ttk.Button(self.one, text=" Browse ...",command = self.openV)
B21.grid(row=2, column=3, sticky='W',pady=5, padx = 2)
# second sub-frame within the first tab (location name, EPSG code, ...)
self.two = ttk.Labelframe(tab1, text = " 2. New LOCATION and new MAPSETs:\n ")
self.two.grid(row=3, column=0, columnspan=2, sticky='S', padx=5, pady=5, ipadx=4,\
ipady=5)
L10 = ttk.Label(self.two, text=" LOCATION name: ")
L10.grid(row=4, column=0, sticky='W', padx=5, pady = 5)
self.E10 = ttk.Entry(self.two, width=30)
self.E10.grid(row=4, column=1, columnspan=2, sticky="WE", pady=2)
self.E10.insert(1,"Mapy")
self.nameL = self.E10.get()
L11 = ttk.Label(self.two, text=" EPSG code:")
L11.grid(row=5, column=0, sticky='W', padx=5, pady=2)
self.E11 = ttk.Entry(self.two, width=7)
self.E11.grid(row=5, column=1, columnspan=2, sticky="WE", pady=2)
self.E11.insert(1,"2065")
self.epsg = self.E11.get()
L12 = ttk.Label(self.two, text=" Path for new LOCATION:")
L12.grid(row=6, column=0, sticky='W', padx=5, pady=2)
self.E12 = ttk.Entry(self.two, width=10)
self.E12.grid(row=6, column=1, columnspan=2, sticky="WE", pady=2)
B12 = ttk.Button(self.two, text=" Browse ...",command = self.openL)
B12.grid(row=6, column=3, sticky='W', padx=5, pady=2)
L13 = ttk.Label(self.two, text=" Name of MAPSET for input data: ")
L13.grid(row=7, column=0, sticky='W', padx=5, pady=2)
self.E13 = ttk.Entry(self.two, width=10)
self.E13.grid(row=7, column=1, columnspan=2, sticky="WE", pady=2)
self.E13.insert(1,"VSTUP")
self.nameMV = self.E13.get()
L14 = ttk.Label(self.two, text=" Name of MAPSET for intermediate data: ")
L14.grid(row=8, column=0, sticky='W', padx=5, pady=2)
self.E14 = ttk.Entry(self.two, width=10)
self.E14.grid(row=8, column=1, columnspan=2, sticky="WE", pady=2)
self.E14.insert(1,"PM")
self.nameMM = self.E14.get()
L15 = ttk.Label(self.two, text=" Name of MAPSET for results: ")
L15.grid(row=9, column=0, sticky='W', padx=5, pady=2)
self.E15 = ttk.Entry(self.two, width=10)
self.E15.grid(row=9, column=1, columnspan=2, sticky="WE", pady=2)
self.E15.insert(1,"PREDIKCIA")
self.nameM = self.E15.get()
# third sub-frame within the first tab (results)
self.three = ttk.Labelframe(tab1, text = " 3. Reports, reclassification rules, information about calculation:\n ")
self.three.grid(row=10, column=0, columnspan=2, sticky='S', padx=5, pady=1, ipadx=5,\
ipady=5)
L31 = ttk.Label(self.three, text=" Path to folder for results: ")
L31.grid(row=11, column=0, sticky='WE', padx=5, pady=2)
self.E31 = ttk.Entry(self.three, width=39)
self.E31.grid(row=11, column=1, columnspan=2, sticky="WE", pady=2)
B31 = ttk.Button(self.three, text="Browse ...",command = self.openI)
B31.grid(row=11, column=3, sticky='W', padx=5, pady=2)
# REFRESH button to clear the preset inputs
ttk.Button(tab1, text="REFRESH",command=self.refreshALL).grid(row=13, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
# button to quit
ttk.Button(tab1, text="QUIT",command=self.gEND).grid(row=13, column=1, \
sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
# button to read the filled-in fields and create the required files
ttk.Button(tab1, text="NEXT", command=lambda: self.valueGET(self.E10.get(),\
self.E11.get(), self.E13.get(), self.E14.get(),\
self.E15.get())).grid(row=14, column=0, \
sticky='WE', padx=5, columnspan=2, rowspan=1,pady=5)
# button serving as a HELP alternative
ttk.Button(tab1, text='INFO',command=tkMessageBox.showinfo).grid(row=12, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
# button to launch the GRASS GIS environment
ttk.Button(tab1, text='RUN GRASS GIS',command=self.RG).grid(row=12, column=1,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SECOND TAB ~~~~~~~~~~~~~~~~~~~~~~
# display the content of the PERMANENT mapset, create the parametric maps
# display information about the maps and their first reclassification
ttk.Label(tab2, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab2,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab2, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab2,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
# first sub-frame within the second tab displaying the maps in the mapset
self.four = ttk.Labelframe(tab2, text = " 4. MAPSET content: " )
self.four.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf1 = ScrolledText.ScrolledText(self.four, height = 5, width = 61)
self.txf1.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab2, text='VIEW CONTENT',command = self.wrZM).grid(row=2,\
column=1, sticky='E', padx=10, pady=5,columnspan=1, rowspan=1)
# second sub-frame within the second tab displaying info about the parametric maps
self.five = ttk.Labelframe(tab2, text = " 5. Information in TXT file: " )
self.five.grid(row=3, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf3 = ScrolledText.ScrolledText(self.five, height = 9, width = 61)
self.txf3.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab2, text='INFO',command=self.showexample).grid(row=7, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text='RUN GRASS GIS',command=self.RG).grid(row=8, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text='NEXT', command = self.II).grid(row=9, column=1,sticky='WE', padx=5,\
pady=5,columnspan=1, rowspan=1)
# button saving the window content into a specific file
ttk.Button(tab2, text='SAVE AS',command=self.edit_save).grid(row=8, column=1,sticky='WE', padx=5,\
pady=5,columnspan=1, rowspan=1)
ttk.Button(tab2, text="QUIT", command=self.gEND).grid(row=9, column=0,sticky='WE',\
padx=5, columnspan=1, rowspan=10,pady=5)
# button loading the content of a specific txt file into the PLGP window
ttk.Button(tab2, text='LOAD TXT', command = self.open_file).grid(row=7,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~ THIRD TAB ~~~~~~~~~~~~~~~~~~~~~~~
# display the computed weights and further information, display the equation for Y
ttk.Label(tab3, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab3,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab3, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab3,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.six = ttk.Labelframe(tab3, text = " 6. Information about calculated weights of all factors : " )
self.six.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf2 = ScrolledText.ScrolledText(self.six, height = 12, width = 61)
self.txf2.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
self.seven = ttk.Labelframe(tab3, text = " 7. The equation to calculate value Y : " )
self.seven.grid(row=3, column=0, columnspan=2, sticky='E', padx=10, pady=5)
self.txf4 = ScrolledText.ScrolledText(self.seven, height = 3.5, width = 61)
self.txf4.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab3, text='INFO',command=tkMessageBox.showinfo).grid(row=7, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text='RUN GRASS GIS',command=self.RG).grid(row=8, column=0,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text='NEXT', command = self.III).grid(row=9, column=1,\
sticky='WE', padx=5,pady=5,columnspan=1, rowspan=1)
# display the equation for Y
ttk.Button(tab3, text='EQUATION',command = self.WrRovnica).grid(row=8, column=1,\
sticky='WE', padx=5,pady=5,columnspan=1, rowspan=1)
ttk.Button(tab3, text="QUIT", command=self.gEND).grid(row=9, column=0,\
sticky='WE',padx=5, columnspan=1, rowspan=1,pady=5)
# weight calculation
ttk.Button(tab3, text='CALCULATE WEIGHTS', command=self.CalculateFactors).grid(row=7,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
# ~~~~~~~~~~~~~~~~~~~~~~~~~ FOURTH TAB ~~~~~~~~~~~~~~~~~~~~~~~~~
# display the MIN and MAX cell values of the Y raster
# reclassification of the continuous interval
ttk.Label(tab4, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab4,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab4, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab4,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.eight = ttk.Labelframe(tab4, text = " 8. The result of equation: " )
self.eight.grid(row=1, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf5 = ScrolledText.ScrolledText(self.eight, height = 5, width = 62)
self.txf5.grid(row=2, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
self.eightt = ttk.Labelframe(tab4, text = " is raster map with MIN and MAX value:" )
self.eightt.grid(row=3, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf6 = ScrolledText.ScrolledText(self.eightt, height = 3, width = 62)
self.txf6.grid(row=4, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab4, text='INFO',command=tkMessageBox.showinfo).grid(row=4,\
column=0,sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab4, text='MIN MAX',command = self.open_filey).grid(row=4,\
column=1, sticky='WE', padx=5, pady=5,columnspan=1, rowspan=1)
self.nine = ttk.Labelframe(tab4, text = " 9. Reclassification rules for result map: " )
self.nine.grid(row=5, column=0, columnspan=2, sticky='E', padx=5, pady=5)
self.txf7 = ScrolledText.ScrolledText(self.nine, height = 5.3, width = 62)
self.txf7.grid(row=6, column=0,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
ttk.Button(tab4, text='SAVE AS',command=self.edit_savey).grid(row=6, column=1,\
sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab4, text='NEXT', command = self.IV).grid(row=7,\
column=1,sticky='WE',padx=5, columnspan=1, rowspan=1,pady=5)
ttk.Button(tab4,text='RUN GRASS GIS',command=self.RG ).grid(row=6, column=0,sticky='WE',\
padx=5, pady = 5, columnspan=1, rowspan=1)
ttk.Button(tab4, text="QUIT", command=self.gEND).grid(row=7, column=0,sticky='WE',\
padx=5, columnspan=1, rowspan=10,pady=5)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~ FIFTH TAB ~~~~~~~~~~~~~~~~~~~~~~~~
# validation of the results
# COV1, COV2 and COV3 refer to cutoff values, i.e. the threshold that splits
# the Y interval into two categories: stable and unstable areas
# in the diploma thesis there is only one threshold (COV1, 2, 3 are equal),
# which is why the ROC curve is angular: plot.png
ttk.Label(tab5, text=u" \n\nHandy software tool for geologists", anchor="s",\
foreground="forest green", font = "Verdana 9 italic").grid(in_=tab5,column=0, row=0,\
columnspan=7, sticky="S",padx=70, pady=17)
ttk.Label(tab5, text=u"\nPredict landslide with GRASS GIS and Python", anchor="n",\
foreground="dark green", font = "Verdana 13 bold").grid(in_=tab5,column=0, row=0,\
columnspan=7, sticky="N",padx=30, pady=1)
self.ten = ttk.Labelframe(tab5, text = " 10. Validation")
self.ten.grid(row=1, column=0, columnspan=2, sticky='E', padx=10, pady=5)
# display the intervals, reclassification rules for splitting the result
# into categories: stable and unstable areas
self.tenL = ttk.Label(self.ten,text=" Intervals according to set cutoff value:",foreground="cadet blue")
self.tenL.grid(row=2, column = 0, columnspan=2, sticky='W', padx=1, pady=1)
self.txf8 = ScrolledText.ScrolledText(self.ten, height = 8, width = 30)
self.txf8.grid(row=3, column=0,columnspan=2, rowspan=1, sticky='NS', padx=5, pady=5)
self.tenL = ttk.Label(self.ten,text=" Receiver operating characteristic :",foreground="cadet blue")
self.tenL.grid(row=2, column = 2, columnspan=2, sticky='W', padx=1, pady=5)
self.txf9 = ScrolledText.ScrolledText(self.ten, height = 17, width = 27)
self.txf9.grid(row=3, column=2,columnspan=2, rowspan=3, sticky='NS', padx=5, pady=5)
# display the areas of the categories: stable and unstable areas
self.tenL = ttk.Label(self.ten,text=" Area according to set cutoff value:",foreground="cadet blue")
self.tenL.grid(row=4, column = 0, columnspan=2, sticky='W', padx=1, pady=5)
self.txf10 = ScrolledText.ScrolledText(self.ten, height = 6, width = 30)
self.txf10.grid(row=5, column=0,columnspan=2, rowspan=1, sticky='NS', padx=5, pady=5)
# display the values for computing the area under the ROC curve
ttk.Button(tab5, text="SHOW VALUES ", command = self.valid).grid(row=7,\
column=0,sticky="WE", padx=5, pady=5,columnspan=1, rowspan=1)
ttk.Button(tab5, text='RUN GRASS GIS',command=self.RG).grid(row=8,\
column=0,sticky="WE",padx=5, pady=5,columnspan=1, rowspan=1)
# display a preliminary result: without legend, existing landslides, ...
ttk.Button(tab5, text="SHOW MAP",command = self.showimg).grid(row=8, column=1,sticky='WE',\
padx=5, pady=5,columnspan=1, rowspan=1)
# display the ROC curve
ttk.Button(tab5, text="SHOW ROC", command = self.showROC).grid(row=7, column=1,sticky='WE',\
padx=5,pady=5,columnspan=1, rowspan=1)
ttk.Button(tab5, text="QUIT", command=self.gEND).grid(row=9,\
column=0,sticky='WE',\
padx=5, columnspan=2, rowspan=1,pady=5)
# function showing an example of how the reclassification rules should look
# for using the r.recode module to reclassify FLOAT maps
def showexample(self):
tkMessageBox.showinfo("recl_file", "\nText file for reclassification:\n\n\
MIN : ? : ?\n ? : ? : ?\n ? : ? : ?\n . . . \n \n ? : ? : ?\n ? : MAX : ? ")
# functions showing a dialog about continuing with the next tab
def II(self):
tkMessageBox.showinfo("GO NEXT"," Continue with third tab ... ")
def III(self):
tkMessageBox.showinfo("GO NEXT"," Continue with fourth tab ... ")
def IV(self):
tkMessageBox.showinfo("GO NEXT"," Continue with fifth tab ... ")
# function to launch GRASS GIS
def RG(self):
try:
os.startfile(r'C:\Program Files (x86)\GRASS GIS 7.0.0\grass70.bat')
except:
tkMessageBox.showwarning(""," Cannot run GRASS GIS. ")
# function to get the PATH to the main folder
def openL(self):
self.E12.delete(0,"end")
# DEFAULT PATH
pr = askdirectory(initialdir="C:\\DP_LF")
self.cestaL = os.path.abspath(pr)
self.E12.insert(0, self.cestaL)
self.cestaL = self.cestaL.encode("ascii","ignore")
return self.cestaL
# function to get the PATH where the input data are stored
def openV(self):
self.E21.delete(0,"end")
# DEFAULT PATH
priecinok = askdirectory(initialdir="C:\\DP_LF\\data")
self.cestaV = os.path.abspath(priecinok)
self.E21.insert(0, self.cestaV)
self.cestaV = self.cestaV.encode("ascii","ignore")
return self.cestaV
# function to get the PATH where the calculation INFO will be stored
def openI(self):
self.E31.delete(0,"end")
# DEFAULT PATH
priecinok = askdirectory(initialdir="C:\\DP_LF\\vypocet")
self.cestaI = os.path.abspath(priecinok)
self.E31.insert(0, self.cestaI)
self.cestaI = self.cestaI.encode("ascii","ignore")
return self.cestaI
# function performing the actions after pressing NEXT in the first tab
# reads the filled-in fields of the first tab
def valueGET(self,a,b,c,d,e):
self.createL()
self.nameL = str(a)
self.epsg = str(b)
self.nameMV = str(c)
self.nameMM = str(d)
self.nameM = str(e)
try:
self.epsg=int(self.epsg)
except:
tkMessageBox.showerror( ""," EPSG code must be numeric ! " )
self.gui.destroy()
self.epsg=str(self.epsg)
if ((self.nameL != "") and (self.epsg != "") and (self.nameMV != "")\
and (self.nameMM != "") and (self.nameM != "") and (self.cestaL != "")\
and (self.cestaV != "") and (self.cestaI != "")):
if tkMessageBox.askquestion("Settings", " New LOCATION, new MAPSETs and other\n\
necessary folders and *.txt files will be created.\n\
All existing files with the same name will be \n\
deleted.\n\n Do you really want to continue?")=="yes":
# create the new folders
nf_info = self.cestaI+"\\info"
if not os.path.isdir(nf_info):
os.makedirs(nf_info)
nf_recl1 = self.cestaI+"\\recl1" # create a new folder
if not os.path.isdir(nf_recl1):
os.makedirs(nf_recl1)
nf_report = self.cestaI+"\\report" # create a new folder
if not os.path.isdir(nf_report):
os.makedirs(nf_report)
nf_recl2 = self.cestaI+"\\recl2" # create a new folder
if not os.path.isdir(nf_recl2):
os.makedirs(nf_recl2)
# create the txt files for the first reclassification
r1_G = nf_recl1+"\\recl1_G.txt"
open(r1_G, 'w').close()
r1_DMR = nf_recl1+"\\recl1_DMR.txt"
open(r1_DMR, 'w').close()
r1_S = nf_recl1+"\\recl1_S.txt"
open(r1_S, 'w').close()
r1_E = nf_recl1+"\\recl1_E.txt"
open(r1_E, 'w').close()
r1_DS = nf_recl1+"\\recl1_DS.txt"
open(r1_DS, 'w').close()
r1_M = nf_recl1+"\\recl1_M.txt"
open(r1_M, 'w').close()
r1_K = nf_recl1+"\\recl1_K.txt"
open(r1_K, 'w').close()
r1_VK = nf_recl1+"\\recl1_VK.txt"
open(r1_VK, 'w').close()
# create the other required txt files (closed right away, they only need to exist)
open(self.cesta + "recl_y.txt","wb").close()
open(self.cesta + "recl_COV1.txt","wb").close()
open(self.cesta + "recl_COV2.txt","wb").close()
open(self.cesta + "recl_COV3.txt","wb").close()
tkMessageBox.showinfo("New folders", " In %s these folders have already been created:\
\n 1. info - information about parametric maps\
\n 2. recl1 - necessary rules for first reclassification\
\n 3. report - information about classes: areas\
\n 4. recl2 - necessary rules for second reclassification\n"\
%self.cestaI)
tkMessageBox.showinfo("First reclassification", " In %s these *.txt files have already been created:\n\
\n 1. recl1_G.txt - geology factor\
\n 2. recl1_DMR.txt - DEM factor\
\n 3. recl1_S.txt - slope factor\
\n 4. recl1_E.txt - aspect factor\
\n 5. recl1_DS.txt - flowlength factor\
\n 6. recl1_M.txt - accumulation factor\
\n 7. recl1_K.txt - curvature factor\
\n 8. recl1_VK.txt - landuse factor\n" %nf_recl1)
tkMessageBox.showinfo("GO NEXT"," Continue with second tab ... ")
else:
self.gui.destroy()
else:
tkMessageBox.showerror("", " ERROR \n\n Check the input values !" )
return self.cestaL
# function to clear the content of the default-filled fields
def refreshALL(self):
self.E10.delete(0,"end")
self.E11.delete(0,"end")
self.E12.delete(0,"end")
self.E13.delete(0,"end")
self.E14.delete(0,"end")
self.E15.delete(0,"end")
self.E21.delete(0,"end")
self.E31.delete(0,"end")
# function to quit PLGP
def gEND(self):
if tkMessageBox.askyesno('Verification', ' Do you really want to quit? '):
self.gui.destroy()
else:
tkMessageBox.askretrycancel("No", ' Press ENTER to continue ')
def wrZM(self):
# clear the content and print the mapsets, rasters and vectors into the txf1 window
self.txf1.delete(1.0, END)
redir = Presmerovanie(self.txf1)
sys.stdout = redir
self.zm()
self.zistiR()
self.zistiV()
# self.txf1.insert(INSERT,"Existing raster maps:\n\nExisting vector maps:")
# print(self.txf1.get(1.0, END))
def delZM(self):
self.txf1.delete(1.0, END)
def open_file(self):
# ensures the txt content is displayed in the window
self.txf3.delete(1.0, END)
redir = Presmerovanie(self.txf3)
sys.stdout = redir
self.txf3.delete(1.0, END)
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['initialdir'] = "C:\\DP_LF\\vypocet\\info"
options['parent'] = self.gui
options['title'] = "Open a file"
# reads only the MIN and MAX cell value information from the INFO txt file
with tkFileDialog.askopenfile(mode='r', initialdir = "C:\\DP_LF\\vypocet\\info") as f_handle:
pr = os.path.curdir
self.oo = os.path.abspath(pr)
self.oo = self.oo.encode("ascii","ignore")
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ print the file name
print "Map:"
print "---------------------------------------"
print "MIN and MAX cell value in raster of selected factor :\n"
# print the file content
for line in f_handle:
line = line.strip()
if line == "": continue
if "max" in line:
print line
if "min" in line:
print line
# save the txt file as ...
def edit_savey(self):
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['parent'] = self.gui
options['title'] = "Save as ..."
f = asksaveasfile(mode='w+', defaultextension=".txt", initialdir = "C:\\DP_LF\\vypocet")
if not f:
return
f.write(self.txf7.get(1.0, END))
f.close()
# open the INFO txt file
def open_filey(self):
# ensures the txt content is displayed in the window
self.txf6.delete(1.0, END)
redir = Presmerovanie(self.txf6)
sys.stdout = redir
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['initialdir'] = "C:\\DP_LF\\vypocet"
options['parent'] = self.gui
options['title'] = "Open a file"
f_handle = "C:\\DP_LF\\vypocet\\info_y.txt"
file = open(f_handle, 'r')
# print the file content
for line in file:
line = line.strip()
if line == "": continue
if "max" in line:
print line
if "min" in line:
print line
def edit_save(self):
options = {}
options['defaultextension'] = '.txt'
options['filetypes'] = [('all files', '.*'), ('text files', '.txt')]
options['parent'] = self.gui
options['title'] = "Save as ..."
f = asksaveasfile(mode='w+', defaultextension=".txt", initialdir = "C:\\DP_LF\\vypocet\\recl1")
if not f:
return
f.write(self.txf3.get(1.0, END))
f.close()
# create the LOCATION
def createL(self):
import grass.script as gscript
import grass.script.setup as gsetup
import grass.script.core as gcore
cestaL = self.Gobj.gisdb
nameL = self.nameL
epsg = self.epsg
mapset = self.nameMV
mapset1 = self.nameMM
mapset2 = self.nameM
gisbase = self.Gobj.gisbase
gsetup.init(gisbase, cestaL, nameL, "PERMANENT")
# create the LOCATION
gcore.create_location(cestaL, nameL, epsg=epsg, proj4=None, filename=None, wkt=None,\
datum=None, datum_trans=None, desc=None, overwrite=True)
# create the MAPSETs
gscript.run_command("g.mapset",overwrite = True,mapset = mapset, flags="c")
gscript.run_command("g.mapset",overwrite = True,mapset = mapset1, flags="c")
gscript.run_command("g.mapset",overwrite = True,mapset = mapset2, flags="c")
# print the list of mapsets in the location
def zm(self):
import grass.script as gscript
print "MAPSETs:"
print gscript.read_command("g.mapsets",flags = "l")
# print the list of raster maps
def zistiR(self):
import grass.script as gscript
print "Raster maps:"
for rast in gscript.list_strings(type = 'rast'):
print rast,
# print the list of vector maps
def zistiV(self):
import grass.script as gscript
print "\nVector maps:"
for vect in gscript.list_strings(type = 'vect'):
print vect,
# compute the weight of a particular factor
def Faktor(self, faktor):
import math
import scipy
# function to save the reclassification rules for the second reclassification
def STL(a,b,c):
ctxt = self.cesta + "recl2\\" + "recl2_" + str(c) + ".txt"
file = open(ctxt, 'w+')
for j,k in zip(a, b):
file.writelines("%r = %r\n" % (j,k))
file.close()
# function to read the content of the reports
def Report(self,F):
import csv
tf = open(F, "rb")
lines = tf.readlines()
lines1 = lines[4:(len(lines)-3)]
data = csv.reader(lines1, delimiter="|")
table = [row for row in data]
# collect the class ids (column 1) and their areas (column 3)
self.recl1 = []
self.P = []
for row in table:
self.recl1.append(row[1])
self.P.append(row[3])
self.recl1 = [int(i) for i in self.recl1]
self.P = [float(i) for i in self.P]
STL(self.recl1, self.P, faktor)
return (self.recl1,self.P)
f1 = "report_"
f2 = str(faktor)
f3 = ".txt"
f4 = "_z.txt"
Ft = self.cesta+"report\\"+f1+f2+f3
Ftz = self.cesta+"report\\"+f1+f2+f4
# area of the class
pt = Report(self, Ft)
Pt = pt[1]
recl1t = pt[0]
# landslide area within the class
ptz = Report(self, Ftz)
Ptz = ptz[1]
recl1tz = ptz[0]
# number of classes of the parametric map
s = len(Pt)
# probability of slope deformation occurrence in the class
p = [(Ptzi)/Pti for Ptzi,Pti in zip(Ptz,Pt)]
# sum of the probabilities within the parameter
p_sum = sum(p)
# probability density
pp = [(pi)/p_sum for pi in p]
# entropy value
H = (-1)*(sum([(math.log(pi)/math.log(2))*pi for pi in pp]))
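# (Shannon entropy of the normalized probabilities pp, in bits)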
# maximum entropy
Hmax = math.log(s)/math.log(2)
# mean probability value
p_pr = scipy.mean(p)
# information coefficient
I = (Hmax - H)/Hmax
# weight of the respective parameter
W = I*p_pr
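# overall W = ((Hmax - H)/Hmax) * mean(p): the information coefficient
# scaled by the mean class probability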
recl1_u,pp_u = zip(*sorted(zip(self.recl1,pp), key=lambda x: x[1]))
recl1_u = list(recl1_u)
print "Factor", faktor,":"
print "---------------------------------------"
print "Weight of factor",faktor, "is %s." % W
print "Second reclassification is saved in *.txt file in\n%s." % (self.cesta + "recl2\\recl2_" + faktor + ".txt")
STL(recl1_u, self.recl1, faktor)
# print Pt[0], Psd[0], p[0], pp[0], H, s, Hmax, p_pr, I, W
if len(recl1t) == len(recl1tz):
print "Landslides occur in all classes.\n"
else:
print "Landslides do not occur in all classes.\n"
return float(W)
def CalculateFactors(self):
# ensures the txt content is displayed in the window
self.txf2.delete(1.0, END)
redir = Presmerovanie(self.txf2)
sys.stdout = redir
self.Wg = self.Faktor("G")
self.Wdmr = self.Faktor("DMR")
self.Ws = self.Faktor("S")
self.We = self.Faktor("E")
self.Wds = self.Faktor("DS")
self.Wk = self.Faktor("K")
self.Wm = self.Faktor("M")
self.Wvk = self.Faktor("VK")
# print the equation into the window
def WrRovnica(self):
self.txf4.delete(1.0, END)
redir = Presmerovanie(self.txf4)
sys.stdout = redir
print "y = geology_recl2 * %f + dmr_recl2 * %f + slope_recl2 * %f + aspect_recl2 * %f + curv_m_recl2 * %f + flowlength_recl2 * %f + accumulation_recl2 * %f + landuse_recl2 * %f" % (self.Wg, self.Wdmr, self.Ws, self.We, self.Wk, self.Wds,self.Wm, self.Wvk)
self.ypsilon()
# write the equation into a txt file
def ypsilon(self):
ctxt = self.cesta + "rovnica.txt"
file = open(ctxt, 'w+')
file.write(self.txf4.get(1.0, END))
file.close()
self.txf5.delete(1.0, END)
redir = Presmerovanie(self.txf5)
sys.stdout = redir
print self.txf4.get(1.0, END)
def valid(self):
self.valrecl()
self.bastats()
self.val()
def val(self):
import numpy as np
import pylab as pl
self.txf9.delete(1.0, END)
redir = Presmerovanie(self.txf9)
sys.stdout = redir
ctxt4 = self.cesta + "stats_COV1.txt"
try:
fhand = open(ctxt4)
except:
print "File not found:",ctxt4
lst = list()
for line in fhand:
line = line.strip()
if line == "": continue
a = line.split()
for word in a:
lst.append(word)
tn4 = float(lst[2])
fn4 = float(lst[5])
fp4 = float(lst[8])
tp4 = float(lst[11])
N4 = tn4+fp4
P4 = fn4+tp4
TP4 = 1-tp4/P4
FP4 = fp4/N4
ctxt6 = self.cesta + "stats_COV2.txt"
try:
fhand = open(ctxt6)
except:
print "File not found:",ctxt6
lst = list()
for line in fhand:
line = line.strip()
if line == "": continue
a = line.split()
for word in a:
lst.append(word)
tn6 = float(lst[2])
fn6 = float(lst[5])
fp6 = float(lst[8])
tp6 = float(lst[11])
N6 = tn6+fp6
P6 = fn6+tp6
TP6 = 1-tp6/P6
FP6 = fp6/N6
ctxt8 = self.cesta + "stats_COV3.txt"
try:
fhand = open(ctxt8)
except:
print "File not found:",ctxt8
lst = list()
for line in fhand:
line = line.strip()
if line == "": continue
a = line.split()
for word in a:
lst.append(word)
tn8 = float(lst[2])
fn8 = float(lst[5])
fp8 = float(lst[8])
tp8 = float(lst[11])
N8 = tn8+fp8
P8 = fn8+tp8
TP8 = 1-tp8/P8
FP8 = fp8/N8
x = 0,FP4,FP6,FP8,1
y = 0,TP4,TP6,TP8,1
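# piecewise-linear ROC curve through the three cutoff points,
# anchored at (0, 0) and (1, 1)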
# AUC
self.auc = np.trapz(y,x)
# ROC curve
pl.clf()
pl.plot(x, y, "r", linewidth="1.7", label='ROC curve (area = %0.2f)' % self.auc)
pl.plot([0, 1], [0, 1], 'r--',alpha=0.57)
pl.xlim([0.0, 1.0])
pl.ylim([0.0, 1.0])
pl.xlabel('False Positive Rate')
pl.ylabel('True Positive Rate')
pl.title('Receiver operating characteristic')
pl.legend(loc="lower right")
pl.fill_between(x,y,color="red",alpha=0.17)
pl.grid(True,alpha=0.7)
pl.savefig(self.cesta + "plot.png")
areaUC = self.auc*100.00
print "Area under the ROC curve:\n%0.2f" % areaUC,"%"
print "\n(I. COV)\n-------------\n*true negative: %0.2f" % (((tn4)/(N4+P4))*100),"%"
print "*false negative: %0.2f" % (((fn4)/(N4+P4))*100),"%"
print "*false positive: %0.2f" % (((fp4)/(N4+P4))*100),"%"
print "*true positive: %0.2f" % (((tp4)/(N4+P4))*100),"%"
print "*FP = %0.2f" % FP4
print "*TP = %0.2f" % TP4
print "\n(II. COV)\n-------------\n*true negative: %0.2f" % (((tn6)/(N6+P6))*100),"%"
print "*false negative: %0.2f" % (((fn6)/(N6+P6))*100),"%"
print "*false positive: %0.2f" % (((fp6)/(N6+P6))*100),"%"
print "*true positive: %0.2f" % (((tp6)/(N6+P6))*100),"%"
print "*FP = %0.2f" % FP6
print "*TP = %0.2f" % TP6
print "\n(III. COV)\n-------------\n*true negative: %0.2f" % (((tn8)/(N8+P8))*100),"%"
print "*false negative: %0.2f" % (((fn8)/(N8+P8))*100),"%"
print "*false positive: %0.2f" % (((fp8)/(N8+P8))*100),"%"
print "*true positive: %0.2f" % (((tp8)/(N8+P8))*100),"%"
print "*FP = %0.2f" % FP8
print "*TP = %0.2f" % TP8
def bastats(self):
self.txf10.delete(1.0, END)
redir = Presmerovanie(self.txf10)
sys.stdout = redir
print "(I. COV):\n-------------"
self.BA_stats(1)
print "(II. COV):\n-------------"
self.BA_stats(2)
print "(III. COV):\n-------------"
self.BA_stats(3)
def BA_stats(self,fstats):
ctxt = self.cesta + "y_stats_COV" + str(fstats) + ".txt"
try:
fhand = open(ctxt)
except:
print "File not found:",ctxt
return
lst = list()
for line in fhand:
line = line.rstrip()
if line == "": continue
a = line.split()
for word in a:
lst.append(word)
a = lst[1]
b = lst[3]
c = float(a)+ float(b)
pa = (float(a)/c)*100
pb = (float(b)/c)*100
print "*without landslide: %0.2f" % (pa),"%"
print "*with landslide: %0.2f" % (pb),"%\n"
def valrecl(self):
self.txf8.delete(1.0, END)
redir = Presmerovanie(self.txf8)
sys.stdout = redir
print "(I. COV):\n-------------"
self.VAL_recl(1)
print "(II. COV):\n-------------"
self.VAL_recl(2)
print "(III. COV):\n-------------"
self.VAL_recl(3)
def VAL_recl(self,frecl):
ctxt = self.cesta + "recl_COV" + str(frecl) + ".txt"
try:
fhand = open(ctxt)
except:
print "File not found:",ctxt
return
lst = list()
for line in fhand:
line = line.rstrip()
if line == "": continue
a = line.split(":")
for word in a:
lst.append(word)
# print lst
a = lst[0]
b = lst[1]
c = lst[3]
d = lst[4]
print "*without landslide"
print a,"-",b
print "*with landslide"
print c,"-",d,"\n"
# display the overview map when the corresponding button is pressed
def showimg(self):
image = self.cesta + "y.png"
try:
os.startfile(image)
except:
tkMessageBox.showwarning(""," Cannot open map. ")
# display the ROC curve when the corresponding button is pressed
def showROC(self):
ROCg = self.cesta + "plot.png"
try:
os.startfile(ROCg)
except:
tkMessageBox.showwarning(""," Cannot open map. ")
# ~~~~~~ MAIN GUI ~~~~~~~~
def main():
gui = Tkinter.Tk()
# display the graphical part of the GUI window
o1 = PhotoImage(file="files\\gui.gif")
def panelObr(o):
Label(gui, image=o).pack(side="right", fill="both", expand=True)
panelObr(o1)
GUI(gui).pack(side="right", fill="both", expand=True)
gui.mainloop()
if __name__ == '__main__':
main()
## FIRST MODEL: data import, creation of the parametric maps, export of information about each of them
## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#import sys
#import os
#import atexit
#
#from grass.script import parser, run_command
#
#def cleanup():
# pass
#
#def main():
# run_command("v.in.ogr",
# flags = 'o',
# overwrite = True,
# input = "C:\DP_LF\data",
# layer = "area",
# output = "area",
# min_area = 0.0001,
# snap = -1,
# geometry = "None")
#
# run_command("v.in.ogr",
# flags = 'o',
# overwrite = True,
# input = "C:\DP_LF\data",
# layer = "geology",
# output = "geology",
# min_area = 0.0001,
# snap = -1,
# geometry = "None")
#
# run_command("v.in.ogr",
# flags = 'o',
# overwrite = True,
# input = "C:\DP_LF\data",
# layer = "polohopis",
# output = "polohopis",
# min_area = 0.0001,
# snap = -1,
# geometry = "None")
#
# run_command("v.in.ogr",
# flags = 'o',
# overwrite = True,
# input = "C:\DP_LF\data",
# layer = "vyskopis",
# output = "vyskopis",
# min_area = 0.0001,
# snap = -1,
# geometry = "None")
#
# run_command("v.in.ogr",
# flags = 'o',
# overwrite = True,
# input = "C:\DP_LF\data",
# layer = "zosuvy",
# output = "zosuvy",
# min_area = 0.0001,
# snap = -1,
# geometry = "None")
#
# run_command("g.region",
# overwrite = True,
# vector = "area",
# res = "10")
#
# run_command("v.to.rast",
# overwrite = True,
# input = "area",
# layer = "1",
# type = "point,line,area",
# output = "zu",
# use = "attr",
# attribute_column = "Id",
# value = 1,
# memory = 300)
#
# run_command("v.to.rast",
# overwrite = True,
# input = "geology",
# layer = "1",
# type = "point,line,area",
# output = "geology",
# use = "attr",
# attribute_column = "kat",
# value = 1,
# memory = 300)
#
# run_command("v.to.rast",
# overwrite = True,
# input = "polohopis",
# layer = "1",
# type = "point,line,area",
# output = "landuse",
# use = "attr",
# attribute_column = "Id",
# value = 1,
# memory = 300)
#
# run_command("v.to.rast",
# overwrite = True,
# input = "zosuvy",
# layer = "1",
# type = "point,line,area",
# output = "zosuvy0",
# use = "attr",
# attribute_column = "Id",
# value = 1,
# memory = 300)
#
# run_command("r.mapcalc",
# overwrite = True,
# expression = "zosuvy = if( zosuvy0 == 0, null(), 1)")
#
# run_command("r.mask",
# overwrite = True,
# raster = "zu",
# maskcats = "*",
# layer = "1")
#
# run_command("v.surf.rst",
# overwrite = True,
# input = "vyskopis",
# layer = "1",
# zcolumn = "VYSKA",
# elevation = "dmr",
# slope = "slope",
# aspect = "aspect",
# pcurvature = "curvature_p_rst",
# tcurvature = "curvature_t_rst",
# mcurvature = "curvature_m",
# tension = 40.,
# segmax = 40,
# npmin = 300,
# zscale = 1.0)
#
# run_command("r.flow",
# overwrite = True,
# elevation = "dmr",
# flowlength = "flowlength")
#
# run_command("r.terraflow",
# overwrite = True,
# elevation = "dmr",
# filled = "filled",
# direction = "direction",
# swatershed = "swatershed",
# accumulation = "accumulation",
# tci = "tci",
# memory = 300)
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "geology",
# output = "C:\DP_LF\vypocet\info\info_G.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "dmr",
# output = "C:\DP_LF\vypocet\info\info_DMR.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "curvature_m",
# output = "C:\DP_LF\vypocet\info\info_K.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "flowlength",
# output = "C:\DP_LF\vypocet\info\info_DS.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "accumulation",
# output = "C:\DP_LF\vypocet\info\info_M.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "landuse",
# output = "C:\DP_LF\vypocet\info\info_VK.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "aspect",
# output = "C:\DP_LF\vypocet\info\info_E.txt",
# percentile = 90,
# separator = "pipe")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "slope",
# output = "C:\DP_LF\vypocet\info\info_S.txt",
# percentile = 90,
# separator = "pipe")
#
# return 0
#
#if __name__ == "__main__":
# options, flags = parser()
# atexit.register(cleanup)
# sys.exit(main())
#
## SECOND MODEL: initial reclassification of the parametric maps, export of information
## about the area of each class and the landslide area within that class
## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#import sys
#import os
#import atexit
#
#from grass.script import parser, run_command
#
#def cleanup():
# pass
#
#def main():
# run_command("r.reclass",
# overwrite = True,
# input = "geology",
# output = "geology_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_G.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "dmr",
# output = "dmr_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_DMR.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "slope",
# output = "slope_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_S.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "aspect",
# output = "aspect_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_E.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "landuse",
# output = "landuse_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_VK.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "flowlength",
# output = "flowlength_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_DS.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "accumulation",
# output = "accumulation_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_M.txt")
#
# run_command("r.recode",
# overwrite = True,
# input = "curvature_m",
# output = "curv_m_recl1",
# rules = "C:\DP_LF\vypocet\recl1\recl1_K.txt")
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "geology_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_G.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "dmr_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_DMR.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "slope_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_S.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "aspect_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_E.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "landuse_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_VK.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "flowlength_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_DS.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "accumulation_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_M.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "curv_m_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_K.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.mask",
# overwrite = True,
# raster = "zosuvy",
# maskcats = "*",
# layer = "1")
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "geology_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_G_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "dmr_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_DMR_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "slope_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_S_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "aspect_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_E_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "landuse_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_VK_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "flowlength_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_DS_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "accumulation_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_M_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.report",
# flags = 'hn',
# overwrite = True,
# map = "curv_m_recl1",
# units = "k,p",
# output = "C:\DP_LF\vypocet\report\report_K_z.txt",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.mask",
# overwrite = True,
# raster = "zu",
# maskcats = "*",
# layer = "1")
#
# return 0
#
#if __name__ == "__main__":
# options, flags = parser()
# atexit.register(cleanup)
# sys.exit(main())
#
## THIRD MODEL: secondary reclassification of the parametric maps
## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#import sys
#import os
#import atexit
#
#from grass.script import parser, run_command
#
#def cleanup():
# pass
#
#def main():
# run_command("r.reclass",
# overwrite = True,
# input = "geology",
# output = "geology_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_G.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "dmr_recl1",
# output = "dmr_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_DMR.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "slope_recl1",
# output = "slope_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_S.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "aspect_recl1",
# output = "aspect_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_E.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "landuse_recl1",
# output = "landuse_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_VK.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "flowlength_recl1",
# output = "flowlength_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_DS.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "accumulation_recl1",
# output = "accumulation_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_M.txt")
#
# run_command("r.reclass",
# overwrite = True,
# input = "curv_m_recl1",
# output = "curv_m_recl2",
# rules = "C:\DP_LF\vypocet\recl2\recl2_K.txt")
#
# run_command("r.mapcalc",
# overwrite = True,
# file = "C:\DP_LF\vypocet\rovnica.txt")
#
# run_command("r.univar",
# flags = 'g',
# overwrite = True,
# map = "y",
# output = "C:\DP_LF\vypocet\info_y.txt",
# percentile = 90,
# separator = "pipe")
#
# return 0
#
#if __name__ == "__main__":
# options, flags = parser()
# atexit.register(cleanup)
# sys.exit(main())
#
## FOURTH MODEL: splitting the continuous interval into categories (more than two, or two)
## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#import sys
#import os
#import atexit
#
#from grass.script import parser, run_command
#
#def cleanup():
# pass
#
#def main():
# run_command("r.quantile",
# flags = 'r',
# input = "y",
# quantiles = 5,
# bins = 1000000)
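## with the -r flag r.quantile prints recode rules, here splitting y into five
## equal-frequency classes; the call below instead reports the 90th percentile of y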
#
# run_command("r.quantile",
# flags = 'r',
# quiet = True,
# input = "y",
# quantiles = -1000000000,
# percentiles = 90,
# bins = 1000000)
#
# return 0
#
#if __name__ == "__main__":
# options, flags = parser()
# atexit.register(cleanup)
# sys.exit(main())
#
## FIFTH MODEL: export of the resulting map, color setup, reclassification of the map Y,
## and export of map information for validation and construction of the ROC curve (three cutoff values)
## ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#import sys
#import os
#import atexit
#
#from grass.script import parser, run_command
#
#def cleanup():
# pass
#
#def main():
# run_command("r.colors",
# flags = 'e',
# map = "y",
# color = "gyr")
#
# run_command("r.recode",
# overwrite = True,
# input = "y",
# output = "ba",
# rules = "C:\DP_LF\vypocet\recl_y.txt",
# title = "kategorie")
#
# run_command("r.out.png",
# flags = 't',
# overwrite = True,
# input = "y",
# output = "C:\DP_LF\vypocet\y.png",
# compression = 7)
#
# run_command("r.recode",
# overwrite = True,
# input = "y",
# output = "y_COV1",
# rules = "C:\DP_LF\vypocet\recl_COV1.txt",
# title = "validation")
#
# run_command("r.stats",
# flags = 'an',
# overwrite = True,
# input = "y_COV1",
# output = "C:\DP_LF\vypocet\y_stats_COV1.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.stats",
# flags = 'cn',
# overwrite = True,
# input = "zosuvy0,y_COV1",
# output = "C:\DP_LF\vypocet\stats_COV1.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.recode",
# overwrite = True,
# input = "y",
# output = "y_COV2",
# rules = "C:\DP_LF\vypocet\recl_COV2.txt")
#
# run_command("r.stats",
# flags = 'an',
# overwrite = True,
# input = "y_COV2",
# output = "C:\DP_LF\vypocet\y_stats_COV2.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.stats",
# flags = 'cn',
# overwrite = True,
# input = "zosuvy0,y_COV2",
# output = "C:\DP_LF\vypocet\stats_COV2.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.recode",
# overwrite = True,
# input = "y",
# output = "y_COV3",
# rules = "C:\DP_LF\vypocet\recl_COV3.txt")
#
# run_command("r.stats",
# flags = 'an',
# overwrite = True,
# input = "y_COV3",
# output = "C:\DP_LF\vypocet\y_stats_COV3.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.stats",
# flags = 'cn',
# overwrite = True,
# input = "zosuvy0,y_COV3",
# output = "C:\DP_LF\vypocet\stats_COV3.txt",
# separator = "space",
# null_value = "*",
# nsteps = 255)
#
# run_command("r.category",
# map = "ba",
# separator = ":",
# rules = "C:\DP_LF\nastroj\files\display\cat_vysledok.txt")
#
#
# return 0
#
#
#if __name__ == "__main__":
# options, flags = parser()
# atexit.register(cleanup)
# sys.exit(main())
"""
Class: Stat232C
Project 3: Goal Inference
Name: Mingjia Yao
Date: May, 2020
"""
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
import copy
import math
class ValueIteration(object):
def __init__(self, transitionTable, rewardTable, valueTable, convergenceTolerance, gamma):
self.transitionTable = transitionTable
self.rewardTable = rewardTable
self.valueTable = valueTable
self.convergenceTolerance = convergenceTolerance
self.gamma = gamma
def __call__(self):
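# value iteration: sweep all states, applying the Bellman optimality backup
#   V(s) <- max_a sum_s' T(s,a,s') * (R(s,a,s') + gamma * V(s'))
# until the largest per-state change drops below convergenceTolerance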
ValueTable=self.valueTable
tempValueTable=copy.copy(ValueTable)
delta=1
while delta >= self.convergenceTolerance:
delta=0
for s in ValueTable:
v=ValueTable[s]
lib_s=self.transitionTable[s]
max_a = float('-inf')  # -inf so states whose optimal value is negative are not floored at 0
for a in lib_s:
sum_sr=0
lib_a=lib_s[a]
for ns in lib_a:
sum_sr=sum_sr+lib_a[ns]*(self.rewardTable[s][a][ns]+self.gamma*tempValueTable[ns])
max_a=max(max_a,sum_sr)
ValueTable[s]=max_a
delta=max(delta,abs(v-ValueTable[s]))
tempValueTable=copy.copy(ValueTable)
policyTable={}
for s in ValueTable:
lib_s=self.transitionTable[s]
pi_s=(0,0)
vs=float('-inf')  # -inf so the best action is found even when all action values are negative
prob_ns=0
for a in lib_s:
sum_sr=0
lib_a=lib_s[a]
for ns in lib_a:
sum_sr=sum_sr+lib_a[ns]*(self.rewardTable[s][a][ns]+self.gamma*ValueTable[ns])
if sum_sr>vs:
pi_s=a
vs=sum_sr
pi_ns=(0,0)
v_ns=0
for ns in lib_a:
if ValueTable[ns]>v_ns:
pi_ns=ns
v_ns=ValueTable[ns]
prob_ns=lib_a[ns]
policyTable[s]={pi_s:prob_ns}
return ([ValueTable, policyTable])
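# minimal usage sketch (with tables shaped like the ones built in main() below):
#   solver = ValueIteration(transition, rewardA, valueTable, convergenceTolerance, gamma)
#   optimalValues, policy = solver()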
def visualizeValueTable(gridWidth, gridHeight, goalState, trapStates, valueTable):
gridAdjust = .5
gridScale = 1.5
xs = np.linspace(-gridAdjust, gridWidth-gridAdjust, gridWidth+1)
ys = np.linspace(-gridAdjust, gridHeight-gridAdjust, gridHeight+1)
plt.rcParams["figure.figsize"] = [gridWidth*gridScale,gridHeight*gridScale]
ax = plt.gca(frameon=False, xticks = range(gridWidth), yticks = range(gridHeight))
#goal and trap coloring
ax.add_patch(Rectangle((goalState[0]-gridAdjust, goalState[1]-gridAdjust), 1, 1, fill=True, color='green', alpha=.1))
for (trapx, trapy) in trapStates:
ax.add_patch(Rectangle((trapx-gridAdjust, trapy-gridAdjust), 1, 1, fill=True, color='black', alpha=.1))
# grid lines
for x in xs:
plt.plot([x, x], [ys[0], ys[-1]], color = "black")
for y in ys:
plt.plot([xs[0], xs[-1]], [y, y], color = "black")
#labeled values
for (statex, statey), val in valueTable.items():
plt.text(statex-.2, statey, str(round(val, 3)))
plt.show()
def visualizePolicy(gridWidth, gridHeight, goalState, trapStates, policy):
#grid height/width
gridAdjust = .5
gridScale = 1.5
arrowScale = .5
xs = np.linspace(-gridAdjust, gridWidth-gridAdjust, gridWidth+1)
ys = np.linspace(-gridAdjust, gridHeight-gridAdjust, gridHeight+1)
plt.rcParams["figure.figsize"] = [gridWidth*gridScale,gridHeight*gridScale]
ax = plt.gca(frameon=False, xticks = range(gridWidth), yticks = range(gridHeight))
#goal and trap coloring
ax.add_patch(Rectangle((goalState[0]-gridAdjust, goalState[1]-gridAdjust), 1, 1, fill=True, color='green', alpha=.1))
for (trapx, trapy) in trapStates:
ax.add_patch(Rectangle((trapx-gridAdjust, trapy-gridAdjust), 1, 1, fill=True, color='black', alpha=.1))
# grid lines
for x in xs:
plt.plot([x, x], [ys[0], ys[-1]], color = "black")
for y in ys:
plt.plot([xs[0], xs[-1]], [y, y], color = "black")
#labeled values
for (statex, statey), actionDict in policy.items():
for (optimalActionX, optimalActionY), actionProb in actionDict.items():
plt.arrow(statex, statey, optimalActionX*actionProb*arrowScale, optimalActionY*actionProb*arrowScale, head_width=0.05*actionProb, head_length=0.1*actionProb)
plt.show()
def viewDictionaryStructure(d, levels, indent=0):
for key, value in d.items():
print('\t' * indent + str(levels[indent]) + ": "+ str(key))
if isinstance(value, dict):
viewDictionaryStructure(value, levels, indent+1)
else:
print('\t' * (indent+1) + str(levels[indent+1])+ ": " + str(value))
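# --- trajectory likelihood helpers ---
# q: one-step lookahead Q(s,a) computed from a converged value table
# pias: action likelihood p(a|s) derived from those Q-values
# p_stn_st: probability of the observed step s_t -> s_{t+1} under that policy
# p_traj: cumulative likelihood of each prefix of an observed trajectory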
def p_traj(traj,beta,gamma,transitTable,rewardTable,valueTable):
p=1
ret=[]
for i in range(len(traj)-1):
p=p*p_stn_st(traj[i],traj[i+1],beta,gamma,transitTable,rewardTable,valueTable)
ret=ret+[p]
return ret
def p_stn_st(st,stn,beta,gamma,transitTable,rewardTable,valueTable):
lib_s=transitTable[st]
p=0
for at in lib_s:
if stn in lib_s[at]:
p=p+lib_s[at][stn]*pias(st,at,beta,gamma,transitTable,rewardTable,valueTable)
return p
def pias(st,at,beta,gamma,transitTable,rewardTable,valueTable):
# assumption: a Boltzmann (softmax) policy p(a|s) ~ exp(beta*Q(s,a)); log(beta*Q) is undefined for the negative Q-values produced here
qs = dict((a, q(st,a,gamma,transitTable,rewardTable,valueTable)) for a in transitTable[st])
return math.exp(beta*qs[at])/sum(math.exp(beta*v) for v in qs.values())
def q(st,at,gamma,transitTable,rewardTable,valueTable):
lib_sa=transitTable[st][at]
q=0
for ns in lib_sa:
q=q+lib_sa[ns]*(rewardTable[st][at][ns]+gamma*valueTable[ns])
return q
def main():
gamma = .95
beta = .4
convergenceTolerance = 10e-7
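# 7x6 grid world; actions are the eight king moves encoded as (dx, dy), and the
# transition table maps state -> action -> {next state: probability} (deterministic here)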
transition = {(0, 0): {(1, 0): {(1, 0): 1},(0, 1): {(0, 1): 1},(-1, 0): {(0, 0): 1},(0, -1): {(0, 0): 1},(-1, 1): {(0, 0): 1},(1, -1): {(0, 0): 1},(1, 1): {(1, 1): 1},(-1, -1): {(0, 0): 1}},(0, 1): {(1, 0): {(1, 1): 1},(0, 1): {(0, 2): 1},(-1, 0): {(0, 1): 1},(0, -1): {(0, 0): 1},(-1, 1): {(0, 1): 1},(1, -1): {(1, 0): 1},(1, 1): {(1, 2): 1},(-1, -1): {(0, 1): 1}},(0, 2): {(1, 0): {(1, 2): 1},(0, 1): {(0, 3): 1},(-1, 0): {(0, 2): 1},(0, -1): {(0, 1): 1},(-1, 1): {(0, 2): 1},(1, -1): {(1, 1): 1},(1, 1): {(1, 3): 1},(-1, -1): {(0, 2): 1}},(0, 3): {(1, 0): {(1, 3): 1},(0, 1): {(0, 4): 1},(-1, 0): {(0, 3): 1},(0, -1): {(0, 2): 1},(-1, 1): {(0, 3): 1},(1, -1): {(1, 2): 1},(1, 1): {(1, 4): 1},(-1, -1): {(0, 3): 1}},(0, 4): {(1, 0): {(1, 4): 1},(0, 1): {(0, 5): 1},(-1, 0): {(0, 4): 1},(0, -1): {(0, 3): 1},(-1, 1): {(0, 4): 1},(1, -1): {(1, 3): 1},(1, 1): {(1, 5): 1},(-1, -1): {(0, 4): 1}},(0, 5): {(1, 0): {(1, 5): 1},(0, 1): {(0, 5): 1},(-1, 0): {(0, 5): 1},(0, -1): {(0, 4): 1},(-1, 1): {(0, 5): 1},(1, -1): {(1, 4): 1},(1, 1): {(0, 5): 1},(-1, -1): {(0, 5): 1}},(1, 0): {(1, 0): {(2, 0): 1},(0, 1): {(1, 1): 1},(-1, 0): {(0, 0): 1},(0, -1): {(1, 0): 1},(-1, 1): {(0, 1): 1},(1, -1): {(1, 0): 1},(1, 1): {(2, 1): 1},(-1, -1): {(1, 0): 1}},(1, 1): {(1, 0): {(2, 1): 1},(0, 1): {(1, 2): 1},(-1, 0): {(0, 1): 1},(0, -1): {(1, 0): 1},(-1, 1): {(0, 2): 1},(1, -1): {(2, 0): 1},(1, 1): {(2, 2): 1},(-1, -1): {(0, 0): 1}},(1, 2): {(1, 0): {(2, 2): 1},(0, 1): {(1, 3): 1},(-1, 0): {(0, 2): 1},(0, -1): {(1, 1): 1},(-1, 1): {(0, 3): 1},(1, -1): {(2, 1): 1},(1, 1): {(2, 3): 1},(-1, -1): {(0, 1): 1}},(1, 3): {(1, 0): {(2, 3): 1},(0, 1): {(1, 4): 1},(-1, 0): {(0, 3): 1},(0, -1): {(1, 2): 1},(-1, 1): {(0, 4): 1},(1, -1): {(2, 2): 1},(1, 1): {(2, 4): 1},(-1, -1): {(0, 2): 1}},(1, 4): {(1, 0): {(2, 4): 1},(0, 1): {(1, 5): 1},(-1, 0): {(0, 4): 1},(0, -1): {(1, 3): 1},(-1, 1): {(0, 5): 1},(1, -1): {(2, 3): 1},(1, 1): {(2, 5): 1},(-1, -1): {(0, 3): 1}},(1, 5): {(1, 0): {(2, 5): 1},(0, 1): {(1, 5): 1},(-1, 0): {(0, 5): 1},(0, -1): {(1, 4): 1},(-1, 1): {(1, 5): 1},(1, -1): {(2, 4): 1},(1, 1): {(1, 5): 1},(-1, -1): {(0, 4): 1}},(2, 0): {(1, 0): {(3, 0): 1},(0, 1): {(2, 1): 1},(-1, 0): {(1, 0): 1},(0, -1): {(2, 0): 1},(-1, 1): {(1, 1): 1},(1, -1): {(2, 0): 1},(1, 1): {(3, 1): 1},(-1, -1): {(2, 0): 1}},(2, 1): {(1, 0): {(3, 1): 1},(0, 1): {(2, 2): 1},(-1, 0): {(1, 1): 1},(0, -1): {(2, 0): 1},(-1, 1): {(1, 2): 1},(1, -1): {(3, 0): 1},(1, 1): {(3, 2): 1},(-1, -1): {(1, 0): 1}},(2, 2): {(1, 0): {(3, 2): 1},(0, 1): {(2, 3): 1},(-1, 0): {(1, 2): 1},(0, -1): {(2, 1): 1},(-1, 1): {(1, 3): 1},(1, -1): {(3, 1): 1},(1, 1): {(3, 3): 1},(-1, -1): {(1, 1): 1}},(2, 3): {(1, 0): {(3, 3): 1},(0, 1): {(2, 4): 1},(-1, 0): {(1, 3): 1},(0, -1): {(2, 2): 1},(-1, 1): {(1, 4): 1},(1, -1): {(3, 2): 1},(1, 1): {(3, 4): 1},(-1, -1): {(1, 2): 1}},(2, 4): {(1, 0): {(3, 4): 1},(0, 1): {(2, 5): 1},(-1, 0): {(1, 4): 1},(0, -1): {(2, 3): 1},(-1, 1): {(1, 5): 1},(1, -1): {(3, 3): 1},(1, 1): {(3, 5): 1},(-1, -1): {(1, 3): 1}},(2, 5): {(1, 0): {(3, 5): 1},(0, 1): {(2, 5): 1},(-1, 0): {(1, 5): 1},(0, -1): {(2, 4): 1},(-1, 1): {(2, 5): 1},(1, -1): {(3, 4): 1},(1, 1): {(2, 5): 1},(-1, -1): {(1, 4): 1}},(3, 0): {(1, 0): {(4, 0): 1},(0, 1): {(3, 1): 1},(-1, 0): {(2, 0): 1},(0, -1): {(3, 0): 1},(-1, 1): {(2, 1): 1},(1, -1): {(3, 0): 1},(1, 1): {(4, 1): 1},(-1, -1): {(3, 0): 1}},(3, 1): {(1, 0): {(4, 1): 1},(0, 1): {(3, 2): 1},(-1, 0): {(2, 1): 1},(0, -1): {(3, 0): 1},(-1, 1): {(2, 2): 1},(1, -1): {(4, 0): 1},(1, 1): {(4, 2): 1},(-1, -1): {(2, 0): 1}},(3, 2): {(1, 0): 
{(4, 2): 1},(0, 1): {(3, 3): 1},(-1, 0): {(2, 2): 1},(0, -1): {(3, 1): 1},(-1, 1): {(2, 3): 1},(1, -1): {(4, 1): 1},(1, 1): {(4, 3): 1},(-1, -1): {(2, 1): 1}},(3, 3): {(1, 0): {(4, 3): 1},(0, 1): {(3, 4): 1},(-1, 0): {(2, 3): 1},(0, -1): {(3, 2): 1},(-1, 1): {(2, 4): 1},(1, -1): {(4, 2): 1},(1, 1): {(4, 4): 1},(-1, -1): {(2, 2): 1}},(3, 4): {(1, 0): {(4, 4): 1},(0, 1): {(3, 5): 1},(-1, 0): {(2, 4): 1},(0, -1): {(3, 3): 1},(-1, 1): {(2, 5): 1},(1, -1): {(4, 3): 1},(1, 1): {(4, 5): 1},(-1, -1): {(2, 3): 1}},(3, 5): {(1, 0): {(4, 5): 1},(0, 1): {(3, 5): 1},(-1, 0): {(2, 5): 1},(0, -1): {(3, 4): 1},(-1, 1): {(3, 5): 1},(1, -1): {(4, 4): 1},(1, 1): {(3, 5): 1},(-1, -1): {(2, 4): 1}},(4, 0): {(1, 0): {(5, 0): 1},(0, 1): {(4, 1): 1},(-1, 0): {(3, 0): 1},(0, -1): {(4, 0): 1},(-1, 1): {(3, 1): 1},(1, -1): {(4, 0): 1},(1, 1): {(5, 1): 1},(-1, -1): {(4, 0): 1}},(4, 1): {(1, 0): {(5, 1): 1},(0, 1): {(4, 2): 1},(-1, 0): {(3, 1): 1},(0, -1): {(4, 0): 1},(-1, 1): {(3, 2): 1},(1, -1): {(5, 0): 1},(1, 1): {(5, 2): 1},(-1, -1): {(3, 0): 1}},(4, 2): {(1, 0): {(5, 2): 1},(0, 1): {(4, 3): 1},(-1, 0): {(3, 2): 1},(0, -1): {(4, 1): 1},(-1, 1): {(3, 3): 1},(1, -1): {(5, 1): 1},(1, 1): {(5, 3): 1},(-1, -1): {(3, 1): 1}},(4, 3): {(1, 0): {(5, 3): 1},(0, 1): {(4, 4): 1},(-1, 0): {(3, 3): 1},(0, -1): {(4, 2): 1},(-1, 1): {(3, 4): 1},(1, -1): {(5, 2): 1},(1, 1): {(5, 4): 1},(-1, -1): {(3, 2): 1}},(4, 4): {(1, 0): {(5, 4): 1},(0, 1): {(4, 5): 1},(-1, 0): {(3, 4): 1},(0, -1): {(4, 3): 1},(-1, 1): {(3, 5): 1},(1, -1): {(5, 3): 1},(1, 1): {(5, 5): 1},(-1, -1): {(3, 3): 1}},(4, 5): {(1, 0): {(5, 5): 1},(0, 1): {(4, 5): 1},(-1, 0): {(3, 5): 1},(0, -1): {(4, 4): 1},(-1, 1): {(4, 5): 1},(1, -1): {(5, 4): 1},(1, 1): {(4, 5): 1},(-1, -1): {(3, 4): 1}},(5, 0): {(1, 0): {(6, 0): 1},(0, 1): {(5, 1): 1},(-1, 0): {(4, 0): 1},(0, -1): {(5, 0): 1},(-1, 1): {(4, 1): 1},(1, -1): {(5, 0): 1},(1, 1): {(6, 1): 1},(-1, -1): {(5, 0): 1}},(5, 1): {(1, 0): {(6, 1): 1},(0, 1): {(5, 2): 1},(-1, 0): {(4, 1): 1},(0, -1): {(5, 0): 1},(-1, 1): {(4, 2): 1},(1, -1): {(6, 0): 1},(1, 1): {(6, 2): 1},(-1, -1): {(4, 0): 1}},(5, 2): {(1, 0): {(6, 2): 1},(0, 1): {(5, 3): 1},(-1, 0): {(4, 2): 1},(0, -1): {(5, 1): 1},(-1, 1): {(4, 3): 1},(1, -1): {(6, 1): 1},(1, 1): {(6, 3): 1},(-1, -1): {(4, 1): 1}},(5, 3): {(1, 0): {(6, 3): 1},(0, 1): {(5, 4): 1},(-1, 0): {(4, 3): 1},(0, -1): {(5, 2): 1},(-1, 1): {(4, 4): 1},(1, -1): {(6, 2): 1},(1, 1): {(6, 4): 1},(-1, -1): {(4, 2): 1}},(5, 4): {(1, 0): {(6, 4): 1},(0, 1): {(5, 5): 1},(-1, 0): {(4, 4): 1},(0, -1): {(5, 3): 1},(-1, 1): {(4, 5): 1},(1, -1): {(6, 3): 1},(1, 1): {(6, 5): 1},(-1, -1): {(4, 3): 1}},(5, 5): {(1, 0): {(6, 5): 1},(0, 1): {(5, 5): 1},(-1, 0): {(4, 5): 1},(0, -1): {(5, 4): 1},(-1, 1): {(5, 5): 1},(1, -1): {(6, 4): 1},(1, 1): {(5, 5): 1},(-1, -1): {(4, 4): 1}},(6, 0): {(1, 0): {(6, 0): 1},(0, 1): {(6, 1): 1},(-1, 0): {(5, 0): 1},(0, -1): {(6, 0): 1},(-1, 1): {(5, 1): 1},(1, -1): {(6, 0): 1},(1, 1): {(6, 0): 1},(-1, -1): {(6, 0): 1}},(6, 1): {(1, 0): {(6, 1): 1},(0, 1): {(6, 2): 1},(-1, 0): {(5, 1): 1},(0, -1): {(6, 0): 1},(-1, 1): {(5, 2): 1},(1, -1): {(6, 1): 1},(1, 1): {(6, 1): 1},(-1, -1): {(5, 0): 1}},(6, 2): {(1, 0): {(6, 2): 1},(0, 1): {(6, 3): 1},(-1, 0): {(5, 2): 1},(0, -1): {(6, 1): 1},(-1, 1): {(5, 3): 1},(1, -1): {(6, 2): 1},(1, 1): {(6, 2): 1},(-1, -1): {(5, 1): 1}},(6, 3): {(1, 0): {(6, 3): 1},(0, 1): {(6, 4): 1},(-1, 0): {(5, 3): 1},(0, -1): {(6, 2): 1},(-1, 1): {(5, 4): 1},(1, -1): {(6, 3): 1},(1, 1): {(6, 3): 1},(-1, -1): {(5, 2): 1}},(6, 4): {(1, 0): {(6, 4): 1},(0, 1): {(6, 5): 
1},(-1, 0): {(5, 4): 1},(0, -1): {(6, 3): 1},(-1, 1): {(5, 5): 1},(1, -1): {(6, 4): 1},(1, 1): {(6, 4): 1},(-1, -1): {(5, 3): 1}},(6, 5): {(1, 0): {(6, 5): 1},(0, 1): {(6, 5): 1},(-1, 0): {(5, 5): 1},(0, -1): {(6, 4): 1},(-1, 1): {(6, 5): 1},(1, -1): {(6, 5): 1},(1, 1): {(6, 5): 1},(-1, -1): {(5, 4): 1}}}
valueTable = {(0, 0): 0,(0, 1): 0,(0, 2): 0,(0, 3): 0,(0, 4): 0,(0, 5): 0,(1, 0): 0,(1, 1): 0,(1, 2): 0,(1, 3): 0,(1, 4): 0,(1, 5): 0,(2, 0): 0,(2, 1): 0,(2, 2): 0,(2, 3): 0,(2, 4): 0,(2, 5): 0,(3, 0): 0,(3, 1): 0,(3, 2): 0,(3, 3): 0,(3, 4): 0,(3, 5): 0,(4, 0): 0,(4, 1): 0,(4, 2): 0,(4, 3): 0,(4, 4): 0,(4, 5): 0,(5, 0): 0,(5, 1): 0,(5, 2): 0,(5, 3): 0,(5, 4): 0,(5, 5): 0,(6, 0): 0,(6, 1): 0,(6, 2): 0,(6, 3): 0,(6, 4): 0,(6, 5): 0}
#Observed Trajectories
trajectoryToGoalA = [(0,0), (1,1), (1,2), (2,3), (3,4), (4,4), (5,4), (6,4)]
trajectoryToGoalB = [(0,0), (1,1), (2,2), (2,3), (3,4), (4,3), (5,2), (6,1)]
trajectoryToGoalC = [(0,0), (0,1), (1,2), (1,3), (1,4), (1,5)]
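# each trajectory is the sequence of grid cells an observed agent visits; the
# goal-inference task scores these paths against the goal-specific reward tables below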
#Environment 1: Solid Barrier
rewardA = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): -1.0},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): -1.0},(0, -1): {(1, 4): -1.0},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): -1.4142135623730951},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): -1.4142135623730951}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 
3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -100},(0, 1): {(3, 2): -100},(-1, 0): {(2, 1): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 2): -100},(1, -1): {(4, 0): -100},(1, 1): {(4, 2): -100},(-1, -1): {(2, 0): -100}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): {(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): {(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): -1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): 
{(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, -1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): -1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): -1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): -1.0},(-1, 0): {(5, 1): -1.0},(0, -1): {(6, 0): -1.0},(-1, 1): {(5, 2): -1.4142135623730951},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): -1.4142135623730951}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): 10},(-1, 0): {(5, 4): 10},(0, -1): {(6, 3): 10},(-1, 1): {(5, 5): 10},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): 10}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
rewardB = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1,
1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): -1.0},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): -1.0},(0, -1): {(1, 4): -1.0},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): -1.4142135623730951},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): -1.4142135623730951}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): 
{(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -100},(0, 1): {(3, 2): -100},(-1, 0): {(2, 1): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 2): -100},(1, -1): {(4, 0): -100},(1, 1): {(4, 2): -100},(-1, -1): {(2, 0): -100}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): {(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): {(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): -1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): {(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, -1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): 
-1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): -1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): 10},(-1, 0): {(5, 1): 10},(0, -1): {(6, 0): 10},(-1, 1): {(5, 2): 10},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): 10}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): -1.0},(-1, 0): {(5, 4): -1.0},(0, -1): {(6, 3): -1.0},(-1, 1): {(5, 5): -1.4142135623730951},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): -1.4142135623730951}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
rewardC = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): 10},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): 10},(0, -1): {(1, 4): 10},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): 10},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): 10}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 
4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -100},(0, 1): {(3, 2): -100},(-1, 0): {(2, 1): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 2): -100},(1, -1): {(4, 0): -100},(1, 1): {(4, 2): -100},(-1, -1): {(2, 0): -100}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): {(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): {(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): -1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): {(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, 
-1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): -1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): -1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): -1.0},(-1, 0): {(5, 1): -1.0},(0, -1): {(6, 0): -1.0},(-1, 1): {(5, 2): -1.4142135623730951},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): -1.4142135623730951}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): -1.0},(-1, 0): {(5, 4): -1.0},(0, -1): {(6, 3): -1.0},(-1, 1): {(5, 5): -1.4142135623730951},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): -1.4142135623730951}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
# Environment 2: Barrier with a Gap
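# Each reward table below maps state -> action -> {successor: reward}. Straight
# moves cost -1, diagonal moves cost -sqrt(2) (about -1.4142), any action taken
# in a barrier cell of column x = 3 costs -100, and most actions taken at the
# goal state pay +10. In these "Gap" variants the barrier cell (3, 1) is opened
# up and carries ordinary step costs instead of -100.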
rewardAGap = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -
1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): -1.0},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): -1.0},(0, -1): {(1, 4): -1.0},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): -1.4142135623730951},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): -1.4142135623730951}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -1.0},(0, 1): {(3, 2): -1.0},(-1, 0): {(2, 1): -1.0},(0, -1): {(3, 0): -1.0},(-1, 1): {(2, 2): -1.4142135623730951},(1, -1): {(4, 0): -1.4142135623730951},(1, 1): {(4, 2): -1.4142135623730951},(-1, -1): {(2, 0): -1.4142135623730951}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): 
{(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): {(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): -1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): {(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, -1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): -1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): 
-1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): -1.0},(-1, 0): {(5, 1): -1.0},(0, -1): {(6, 0): -1.0},(-1, 1): {(5, 2): -1.4142135623730951},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): -1.4142135623730951}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): 10},(-1, 0): {(5, 4): 10},(0, -1): {(6, 3): 10},(-1, 1): {(5, 5): 10},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): 10}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
rewardBGap = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): -1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): -1.0},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): -1.0},(0, -1): {(1, 4): -1.0},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): -1.4142135623730951},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): -1.4142135623730951}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): 
-1.4142135623730951}},(2, 3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -1.0},(0, 1): {(3, 2): -1.0},(-1, 0): {(2, 1): -1.0},(0, -1): {(3, 0): -1.0},(-1, 1): {(2, 2): -1.4142135623730951},(1, -1): {(4, 0): -1.4142135623730951},(1, 1): {(4, 2): -1.4142135623730951},(-1, -1): {(2, 0): -1.4142135623730951}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): {(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): {(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): 
-1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): {(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, -1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): -1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): -1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): 10},(-1, 0): {(5, 1): 10},(0, -1): {(6, 0): 10},(-1, 1): {(5, 2): 10},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): 10}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): -1.0},(-1, 0): {(5, 4): -1.0},(0, -1): {(6, 3): -1.0},(-1, 1): {(5, 5): -1.4142135623730951},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): -1.4142135623730951}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
rewardCGap = {(0, 0): {(1, 0): {(1, 0): -1.0},(0, 1): {(0, 1): -1.0},(-1, 0): {(0, 0): -1},(0, -1): {(0, 0): -1},(-1, 1): {(0, 0): -1},(1, -1): {(0, 0): -1},(1, 1): {(1, 1): -1.4142135623730951},(-1, -1): {(0, 0): -1}},(0, 1): {(1, 0): {(1, 1): -1.0},(0, 1): {(0, 2): -1.0},(-1, 0): {(0, 1): -1},(0, -1): {(0, 0): -1.0},(-1, 1): {(0, 1): -1},(1, -1): {(1, 0): -1.4142135623730951},(1, 1): {(1, 2): -1.4142135623730951},(-1, -1): {(0, 1): -1}},(0, 2): {(1, 0): {(1, 2): -1.0},(0, 1): {(0, 3): -1.0},(-1, 0): {(0, 2): -1},(0, -1): {(0, 1): -1.0},(-1, 1): {(0, 2): -1},(1, -1): {(1, 1): -1.4142135623730951},(1, 1): {(1, 3): -1.4142135623730951},(-1, -1): {(0, 2): -1}},(0, 3): {(1, 0): {(1, 3): -1.0},(0, 1): {(0, 4): -1.0},(-1, 0): {(0, 3): -1},(0, -1): {(0, 2): -1.0},(-1, 1): {(0, 3): -1},(1, -1): {(1, 2): -1.4142135623730951},(1, 1): {(1, 4): -1.4142135623730951},(-1, -1): {(0, 3): -1}},(0, 4): {(1, 0): {(1, 4): -1.0},(0, 1): {(0, 5): -1.0},(-1, 0): {(0, 4): -1},(0, -1): {(0, 3): -1.0},(-1, 1): {(0, 4): -1},(1, -1): {(1, 3): -1.4142135623730951},(1, 1): {(1, 5): -1.4142135623730951},(-1, -1): {(0, 4): -1}},(0, 5): {(1, 0): {(1, 5): -1.0},(0, 1): {(0, 5): -1},(-1, 0): {(0, 5): -1},(0, -1): {(0, 4): -1.0},(-1, 1): {(0, 5): -1},(1, -1): {(1, 4): -1.4142135623730951},(1, 1): {(0, 5): -1},(-1, -1): {(0, 5): -1}},(1, 0): {(1, 0): {(2, 0): -1.0},(0, 1): {(1, 1): -1.0},(-1, 0): {(0, 0): -1.0},(0, -1): {(1, 0): -1},(-1, 1): {(0, 1): -1.4142135623730951},(1, -1): {(1, 0): -1},(1, 1): {(2, 1): -1.4142135623730951},(-1, -1): {(1, 0): -1}},(1, 1): {(1, 0): {(2, 1): -1.0},(0, 1): {(1, 2): -1.0},(-1, 0): {(0, 1): -1.0},(0, -1): {(1, 0): -1.0},(-1, 1): {(0, 2): -1.4142135623730951},(1, -1): {(2, 0): -1.4142135623730951},(1, 1): {(2, 2): -1.4142135623730951},(-1, -1): {(0, 0): -1.4142135623730951}},(1, 2): {(1, 0): {(2, 2): -1.0},(0, 1): {(1, 3): -1.0},(-1, 0): {(0, 2): -1.0},(0, -1): {(1, 1): -1.0},(-1, 1): {(0, 3): -1.4142135623730951},(1, -1): {(2, 1): -1.4142135623730951},(1, 1): {(2, 3): -1.4142135623730951},(-1, -1): {(0, 1): -1.4142135623730951}},(1, 3): {(1, 0): {(2, 3): -1.0},(0, 1): {(1, 4): -1.0},(-1, 0): {(0, 3): -1.0},(0, -1): {(1, 2): -1.0},(-1, 1): {(0, 4): -1.4142135623730951},(1, -1): {(2, 2): -1.4142135623730951},(1, 1): {(2, 4): -1.4142135623730951},(-1, -1): {(0, 2): -1.4142135623730951}},(1, 4): {(1, 0): {(2, 4): -1.0},(0, 1): {(1, 5): -1.0},(-1, 0): {(0, 4): -1.0},(0, -1): {(1, 3): -1.0},(-1, 1): {(0, 5): -1.4142135623730951},(1, -1): {(2, 3): -1.4142135623730951},(1, 1): {(2, 5): -1.4142135623730951},(-1, -1): {(0, 3): 
-1.4142135623730951}},(1, 5): {(1, 0): {(2, 5): 10},(0, 1): {(1, 5): -1},(-1, 0): {(0, 5): 10},(0, -1): {(1, 4): 10},(-1, 1): {(1, 5): -1},(1, -1): {(2, 4): 10},(1, 1): {(1, 5): -1},(-1, -1): {(0, 4): 10}},(2, 0): {(1, 0): {(3, 0): -1.0},(0, 1): {(2, 1): -1.0},(-1, 0): {(1, 0): -1.0},(0, -1): {(2, 0): -1},(-1, 1): {(1, 1): -1.4142135623730951},(1, -1): {(2, 0): -1},(1, 1): {(3, 1): -1.4142135623730951},(-1, -1): {(2, 0): -1}},(2, 1): {(1, 0): {(3, 1): -1.0},(0, 1): {(2, 2): -1.0},(-1, 0): {(1, 1): -1.0},(0, -1): {(2, 0): -1.0},(-1, 1): {(1, 2): -1.4142135623730951},(1, -1): {(3, 0): -1.4142135623730951},(1, 1): {(3, 2): -1.4142135623730951},(-1, -1): {(1, 0): -1.4142135623730951}},(2, 2): {(1, 0): {(3, 2): -1.0},(0, 1): {(2, 3): -1.0},(-1, 0): {(1, 2): -1.0},(0, -1): {(2, 1): -1.0},(-1, 1): {(1, 3): -1.4142135623730951},(1, -1): {(3, 1): -1.4142135623730951},(1, 1): {(3, 3): -1.4142135623730951},(-1, -1): {(1, 1): -1.4142135623730951}},(2, 3): {(1, 0): {(3, 3): -1.0},(0, 1): {(2, 4): -1.0},(-1, 0): {(1, 3): -1.0},(0, -1): {(2, 2): -1.0},(-1, 1): {(1, 4): -1.4142135623730951},(1, -1): {(3, 2): -1.4142135623730951},(1, 1): {(3, 4): -1.4142135623730951},(-1, -1): {(1, 2): -1.4142135623730951}},(2, 4): {(1, 0): {(3, 4): -1.0},(0, 1): {(2, 5): -1.0},(-1, 0): {(1, 4): -1.0},(0, -1): {(2, 3): -1.0},(-1, 1): {(1, 5): -1.4142135623730951},(1, -1): {(3, 3): -1.4142135623730951},(1, 1): {(3, 5): -1.4142135623730951},(-1, -1): {(1, 3): -1.4142135623730951}},(2, 5): {(1, 0): {(3, 5): -1.0},(0, 1): {(2, 5): -1},(-1, 0): {(1, 5): -1.0},(0, -1): {(2, 4): -1.0},(-1, 1): {(2, 5): -1},(1, -1): {(3, 4): -1.4142135623730951},(1, 1): {(2, 5): -1},(-1, -1): {(1, 4): -1.4142135623730951}},(3, 0): {(1, 0): {(4, 0): -100},(0, 1): {(3, 1): -100},(-1, 0): {(2, 0): -100},(0, -1): {(3, 0): -100},(-1, 1): {(2, 1): -100},(1, -1): {(3, 0): -100},(1, 1): {(4, 1): -100},(-1, -1): {(3, 0): -100}},(3, 1): {(1, 0): {(4, 1): -1.0},(0, 1): {(3, 2): -1.0},(-1, 0): {(2, 1): -1.0},(0, -1): {(3, 0): -1.0},(-1, 1): {(2, 2): -1.4142135623730951},(1, -1): {(4, 0): -1.4142135623730951},(1, 1): {(4, 2): -1.4142135623730951},(-1, -1): {(2, 0): -1.4142135623730951}},(3, 2): {(1, 0): {(4, 2): -100},(0, 1): {(3, 3): -100},(-1, 0): {(2, 2): -100},(0, -1): {(3, 1): -100},(-1, 1): {(2, 3): -100},(1, -1): {(4, 1): -100},(1, 1): {(4, 3): -100},(-1, -1): {(2, 1): -100}},(3, 3): {(1, 0): {(4, 3): -100},(0, 1): {(3, 4): -100},(-1, 0): {(2, 3): -100},(0, -1): {(3, 2): -100},(-1, 1): {(2, 4): -100},(1, -1): {(4, 2): -100},(1, 1): {(4, 4): -100},(-1, -1): {(2, 2): -100}},(3, 4): {(1, 0): {(4, 4): -1.0},(0, 1): {(3, 5): -1.0},(-1, 0): {(2, 4): -1.0},(0, -1): {(3, 3): -1.0},(-1, 1): {(2, 5): -1.4142135623730951},(1, -1): {(4, 3): -1.4142135623730951},(1, 1): {(4, 5): -1.4142135623730951},(-1, -1): {(2, 3): -1.4142135623730951}},(3, 5): {(1, 0): {(4, 5): -1.0},(0, 1): {(3, 5): -1},(-1, 0): {(2, 5): -1.0},(0, -1): {(3, 4): -1.0},(-1, 1): {(3, 5): -1},(1, -1): {(4, 4): -1.4142135623730951},(1, 1): {(3, 5): -1},(-1, -1): {(2, 4): -1.4142135623730951}},(4, 0): {(1, 0): {(5, 0): -1.0},(0, 1): {(4, 1): -1.0},(-1, 0): {(3, 0): -1.0},(0, -1): {(4, 0): -1},(-1, 1): {(3, 1): -1.4142135623730951},(1, -1): {(4, 0): -1},(1, 1): {(5, 1): -1.4142135623730951},(-1, -1): {(4, 0): -1}},(4, 1): {(1, 0): {(5, 1): -1.0},(0, 1): {(4, 2): -1.0},(-1, 0): {(3, 1): -1.0},(0, -1): {(4, 0): -1.0},(-1, 1): {(3, 2): -1.4142135623730951},(1, -1): {(5, 0): -1.4142135623730951},(1, 1): {(5, 2): -1.4142135623730951},(-1, -1): {(3, 0): -1.4142135623730951}},(4, 2): {(1, 0): {(5, 2): -1.0},(0, 1): 
{(4, 3): -1.0},(-1, 0): {(3, 2): -1.0},(0, -1): {(4, 1): -1.0},(-1, 1): {(3, 3): -1.4142135623730951},(1, -1): {(5, 1): -1.4142135623730951},(1, 1): {(5, 3): -1.4142135623730951},(-1, -1): {(3, 1): -1.4142135623730951}},(4, 3): {(1, 0): {(5, 3): -1.0},(0, 1): {(4, 4): -1.0},(-1, 0): {(3, 3): -1.0},(0, -1): {(4, 2): -1.0},(-1, 1): {(3, 4): -1.4142135623730951},(1, -1): {(5, 2): -1.4142135623730951},(1, 1): {(5, 4): -1.4142135623730951},(-1, -1): {(3, 2): -1.4142135623730951}},(4, 4): {(1, 0): {(5, 4): -1.0},(0, 1): {(4, 5): -1.0},(-1, 0): {(3, 4): -1.0},(0, -1): {(4, 3): -1.0},(-1, 1): {(3, 5): -1.4142135623730951},(1, -1): {(5, 3): -1.4142135623730951},(1, 1): {(5, 5): -1.4142135623730951},(-1, -1): {(3, 3): -1.4142135623730951}},(4, 5): {(1, 0): {(5, 5): -1.0},(0, 1): {(4, 5): -1},(-1, 0): {(3, 5): -1.0},(0, -1): {(4, 4): -1.0},(-1, 1): {(4, 5): -1},(1, -1): {(5, 4): -1.4142135623730951},(1, 1): {(4, 5): -1},(-1, -1): {(3, 4): -1.4142135623730951}},(5, 0): {(1, 0): {(6, 0): -1.0},(0, 1): {(5, 1): -1.0},(-1, 0): {(4, 0): -1.0},(0, -1): {(5, 0): -1},(-1, 1): {(4, 1): -1.4142135623730951},(1, -1): {(5, 0): -1},(1, 1): {(6, 1): -1.4142135623730951},(-1, -1): {(5, 0): -1}},(5, 1): {(1, 0): {(6, 1): -1.0},(0, 1): {(5, 2): -1.0},(-1, 0): {(4, 1): -1.0},(0, -1): {(5, 0): -1.0},(-1, 1): {(4, 2): -1.4142135623730951},(1, -1): {(6, 0): -1.4142135623730951},(1, 1): {(6, 2): -1.4142135623730951},(-1, -1): {(4, 0): -1.4142135623730951}},(5, 2): {(1, 0): {(6, 2): -1.0},(0, 1): {(5, 3): -1.0},(-1, 0): {(4, 2): -1.0},(0, -1): {(5, 1): -1.0},(-1, 1): {(4, 3): -1.4142135623730951},(1, -1): {(6, 1): -1.4142135623730951},(1, 1): {(6, 3): -1.4142135623730951},(-1, -1): {(4, 1): -1.4142135623730951}},(5, 3): {(1, 0): {(6, 3): -1.0},(0, 1): {(5, 4): -1.0},(-1, 0): {(4, 3): -1.0},(0, -1): {(5, 2): -1.0},(-1, 1): {(4, 4): -1.4142135623730951},(1, -1): {(6, 2): -1.4142135623730951},(1, 1): {(6, 4): -1.4142135623730951},(-1, -1): {(4, 2): -1.4142135623730951}},(5, 4): {(1, 0): {(6, 4): -1.0},(0, 1): {(5, 5): -1.0},(-1, 0): {(4, 4): -1.0},(0, -1): {(5, 3): -1.0},(-1, 1): {(4, 5): -1.4142135623730951},(1, -1): {(6, 3): -1.4142135623730951},(1, 1): {(6, 5): -1.4142135623730951},(-1, -1): {(4, 3): -1.4142135623730951}},(5, 5): {(1, 0): {(6, 5): -1.0},(0, 1): {(5, 5): -1},(-1, 0): {(4, 5): -1.0},(0, -1): {(5, 4): -1.0},(-1, 1): {(5, 5): -1},(1, -1): {(6, 4): -1.4142135623730951},(1, 1): {(5, 5): -1},(-1, -1): {(4, 4): -1.4142135623730951}},(6, 0): {(1, 0): {(6, 0): -1},(0, 1): {(6, 1): -1.0},(-1, 0): {(5, 0): -1.0},(0, -1): {(6, 0): -1},(-1, 1): {(5, 1): -1.4142135623730951},(1, -1): {(6, 0): -1},(1, 1): {(6, 0): -1},(-1, -1): {(6, 0): -1}},(6, 1): {(1, 0): {(6, 1): -1},(0, 1): {(6, 2): -1.0},(-1, 0): {(5, 1): -1.0},(0, -1): {(6, 0): -1.0},(-1, 1): {(5, 2): -1.4142135623730951},(1, -1): {(6, 1): -1},(1, 1): {(6, 1): -1},(-1, -1): {(5, 0): -1.4142135623730951}},(6, 2): {(1, 0): {(6, 2): -1},(0, 1): {(6, 3): -1.0},(-1, 0): {(5, 2): -1.0},(0, -1): {(6, 1): -1.0},(-1, 1): {(5, 3): -1.4142135623730951},(1, -1): {(6, 2): -1},(1, 1): {(6, 2): -1},(-1, -1): {(5, 1): -1.4142135623730951}},(6, 3): {(1, 0): {(6, 3): -1},(0, 1): {(6, 4): -1.0},(-1, 0): {(5, 3): -1.0},(0, -1): {(6, 2): -1.0},(-1, 1): {(5, 4): -1.4142135623730951},(1, -1): {(6, 3): -1},(1, 1): {(6, 3): -1},(-1, -1): {(5, 2): -1.4142135623730951}},(6, 4): {(1, 0): {(6, 4): -1},(0, 1): {(6, 5): -1.0},(-1, 0): {(5, 4): -1.0},(0, -1): {(6, 3): -1.0},(-1, 1): {(5, 5): -1.4142135623730951},(1, -1): {(6, 4): -1},(1, 1): {(6, 4): -1},(-1, -1): {(5, 3): 
-1.4142135623730951}},(6, 5): {(1, 0): {(6, 5): -1},(0, 1): {(6, 5): -1},(-1, 0): {(5, 5): -1.0},(0, -1): {(6, 4): -1.0},(-1, 1): {(6, 5): -1},(1, -1): {(6, 5): -1},(1, 1): {(6, 5): -1},(-1, -1): {(5, 4): -1.4142135623730951}}}
# Reward A: barrier without a gap (goal at (6, 4))
performValueIteration = ValueIteration(transition, rewardA, valueTable, convergenceTolerance, gamma)
optimalValuesA, policyTableA = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (6,4) , trapStates = [(3,0), (3,1),(3,2), (3,3)],valueTable=optimalValuesA)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (6,4) , trapStates = [(3,0), (3,1),(3,2), (3,3)], policy=policyTableA)
print(p_traj(trajectoryToGoalA,beta,gamma,transition,rewardA,optimalValuesA))
plt.plot([2,3,4,5,6,7,8],p_traj(trajectoryToGoalA,beta,gamma,transition,rewardA,optimalValuesA))
plt.ylabel('Probability of goal A (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal A without gap')
plt.show()
# Reward A: barrier with a gap (goal at (6, 4))
performValueIteration = ValueIteration(transition, rewardAGap, valueTable, convergenceTolerance, gamma)
optimalValuesAG, policyTableAG = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (6,4) , trapStates = [(3,0), (3,2), (3,3)],valueTable=optimalValuesAG)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (6,4) , trapStates = [(3,0), (3,2), (3,3)], policy=policyTableAG)
print(p_traj(trajectoryToGoalA,beta,gamma,transition,rewardAGap,optimalValuesAG))
plt.plot([2,3,4,5,6,7,8],p_traj(trajectoryToGoalA,beta,gamma,transition,rewardAGap,optimalValuesAG))
plt.ylabel('Probability of goal A with gap (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal A with gap')
plt.show()
# Reward B: barrier without a gap (goal at (6, 1))
performValueIteration = ValueIteration(transition, rewardB, valueTable, convergenceTolerance, gamma)
optimalValuesB, policyTableB = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (6,1) , trapStates = [(3,0), (3,1),(3,2), (3,3)],valueTable=optimalValuesB)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (6,1) , trapStates = [(3,0), (3,1),(3,2), (3,3)], policy=policyTableB)
print(p_traj(trajectoryToGoalB,beta,gamma,transition,rewardB,optimalValuesB))
plt.plot([2,3,4,5,6,7,8],p_traj(trajectoryToGoalB,beta,gamma,transition,rewardB,optimalValuesB))
plt.ylabel('Probability of goal B (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal B without gap')
plt.show()
# Reward B: barrier with a gap (goal at (6, 1))
performValueIteration = ValueIteration(transition, rewardBGap, valueTable, convergenceTolerance, gamma)
optimalValuesBG, policyTableBG = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (6,1) , trapStates = [(3,0), (3,2), (3,3)],valueTable=optimalValuesBG)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (6,1) , trapStates = [(3,0), (3,2), (3,3)], policy=policyTableBG)
print(p_traj(trajectoryToGoalB,beta,gamma,transition,rewardBGap,optimalValuesBG))
plt.plot([2,3,4,5,6,7,8],p_traj(trajectoryToGoalB,beta,gamma,transition,rewardBGap,optimalValuesBG))
plt.ylabel('Probability of goal B with gap (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal B with gap')
plt.show()
# Reward C: barrier without a gap (goal at (1, 5))
performValueIteration = ValueIteration(transition, rewardC, valueTable, convergenceTolerance, gamma)
optimalValuesC, policyTableC = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (1,5) , trapStates = [(3,0), (3,1),(3,2), (3,3)],valueTable=optimalValuesC)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (1,5) , trapStates = [(3,0), (3,1),(3,2), (3,3)], policy=policyTableC)
print(p_traj(trajectoryToGoalC,beta,gamma,transition,rewardC,optimalValuesC))
plt.plot([2,3,4,5,6],p_traj(trajectoryToGoalC,beta,gamma,transition,rewardC,optimalValuesC))
plt.ylabel('Probability of goal C (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal C without gap')
plt.show()
# Reward C: barrier with a gap (goal at (1, 5))
performValueIteration = ValueIteration(transition, rewardCGap, valueTable, convergenceTolerance, gamma)
optimalValuesCG, policyTableCG = performValueIteration()
visualizeValueTable(gridWidth=7, gridHeight=6, goalState = (1,5) , trapStates = [(3,0), (3,2), (3,3)],valueTable=optimalValuesCG)
visualizePolicy(gridWidth=7, gridHeight=6, goalState = (1,5) , trapStates = [(3,0), (3,2), (3,3)], policy=policyTableCG)
print(p_traj(trajectoryToGoalC,beta,gamma,transition,rewardCGap,optimalValuesCG))
plt.plot([2,3,4,5,6],p_traj(trajectoryToGoalC,beta,gamma,transition,rewardCGap,optimalValuesCG))
plt.ylabel('Probability of goal C with gap (unnormalized)')
plt.xlabel('Time point')
plt.title('Goal C with gap')
plt.show()
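# The six blocks above repeat the same solve/visualize/plot pattern. A
# hypothetical consolidation (a sketch only; it reuses ValueIteration,
# visualizeValueTable, visualizePolicy, p_traj and the transition, valueTable,
# convergenceTolerance, gamma and beta objects already defined in this script):
def runGoalAnalysis(reward, goalState, trapStates, trajectory, timePoints, label):
    # Solve the MDP for this reward table.
    performValueIteration = ValueIteration(transition, reward, valueTable, convergenceTolerance, gamma)
    optimalValues, policyTable = performValueIteration()
    # Visualize the optimal values and the resulting policy.
    visualizeValueTable(gridWidth=7, gridHeight=6, goalState=goalState, trapStates=trapStates, valueTable=optimalValues)
    visualizePolicy(gridWidth=7, gridHeight=6, goalState=goalState, trapStates=trapStates, policy=policyTable)
    # Plot the (unnormalized) probability of the observed trajectory over time.
    probs = p_traj(trajectory, beta, gamma, transition, reward, optimalValues)
    print(probs)
    plt.plot(timePoints, probs)
    plt.ylabel('Probability of ' + label + ' (unnormalized)')
    plt.xlabel('Time point')
    plt.title(label)
    plt.show()
    return optimalValues, policyTable
# Example call, equivalent to the "Reward A" block above:
# runGoalAnalysis(rewardA, (6, 4), [(3, 0), (3, 1), (3, 2), (3, 3)],
#                 trajectoryToGoalA, [2, 3, 4, 5, 6, 7, 8], 'goal A without gap')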
if __name__ == '__main__':
main()
# coding: utf-8
import gc
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
import pandas as pd
import time
from datetime import datetime, timedelta
from collections import deque, defaultdict
from itertools import islice
from tqdm import tqdm as tqdm
from contextlib import contextmanager
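# Timing helper: wraps each pipeline stage below and prints its elapsed time.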
@contextmanager
def faith(title):
start_time = time.time()
yield
print(">> {} - done in {:.0f}s".format(title, time.time() - start_time))
START_DATE = datetime.strptime('2017-12-01', '%Y-%m-%d') #or 30th Nov?
#START_DATE = datetime.strptime('2017-11-01', '%Y-%m-%d')
periods = ['7d', '14d']
min_instances = 1000
aggr_cols = [
'addr1','card1','card2','card3','card4','card5','card6','ProductCD',
'pemail_domain','remail_domain','pemail_ext', 'remail_ext',
]
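# Card/address/email columns, presumably used as grouping keys for aggregate features.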
country_map = {
'com':'us', 'net':'us', 'edu':'us', 'gmail':'us', 'mx': 'mx', 'es':'es', 'de':'de', 'fr':'fr','uk':'uk', 'jp':'jp'
}
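# Email helpers: domain() keeps the part before the first dot; pemail_country()
# takes the last dot-separated token as a rough country/TLD code (cf. country_map).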
domain = lambda x: x.split('.')[0]
pemail_country = lambda x: x.split('.')[-1]
USER_AGENTS = [
'Intel', 'Windows NT 6.1', 'Windows NT 6.2', 'Microsoft', 'Trident/7.0',
'Touch', 'S.N.O.W.4', 'BOIE9', 'rv:11.0', 'rv:48.0', 'rv:52.0', 'rv:56.0',
'rv:57.0', 'rv:58.0', 'rv:59.0', 'rv:60.0', 'rv:61.0', 'rv:62.0', 'rv:63.0',
'rv:64.0', 'rv:38.0', 'rv:51.0', 'rv:45.0', 'rv:42.0', 'rv:49.0', 'en-us',
'rv:41.0', 'rv:54.0', 'rv:47.0', 'rv:55.0', 'rv:31.0', 'rv:44.0', 'rv:53.0',
'rv:39.0', 'rv:35.0', 'rv:50.0', 'rv:37.0', 'rv:52.9', 'rv:46.0', 'rv:43.0',
'rv:29.0', 'rv:14.0', 'rv:33.0', 'rv:21.0', 'rv:27.0', 'rv:65.0', 'rv:28.0',
'rv:60.1.0', 'es-us', 'es-es', 'es-mx', 'en-gb', 'Linux', 'MDDRJS',
'Android 5.1', 'Android 4.4.2', 'Android 6.0.1', 'Android 6.0', 'Android 7.0',
'Android', 'Android 8.0.0', 'Android 7.1.2', 'WOW64', 'ATT-IE11', 'MAMI', 'MALC',
'hp2015', 'Northwell', 'xs-Z47b7VqTMxs', 'QwestIE8', 'ATT', 'NetHelper70',
'FunWebProducts', 'Lifesize', 'CPU'
]
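# The columns below are read as float32 instead of the default float64 to halve
# their memory footprint at load time.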
CAT_FCOLS = ['card2', 'card3', 'card5', 'addr1', 'addr2', 'dist1', 'dist2']
C_FCOLS = [f'C{i}' for i in range(1, 15)]
D_FCOLS = [f'D{i}' for i in range(1, 16)]
V_FCOLS = [f'V{i}' for i in range(1, 340)]
FLOAT64_TCOLS = CAT_FCOLS + C_FCOLS + D_FCOLS + V_FCOLS
FLOAT64_ICOLS = [f'id_0{i}' for i in range(1, 10)] + ['id_10', 'id_11', 'id_13', 'id_14', 'id_17', 'id_18', 'id_19', 'id_20', 'id_21', 'id_22', 'id_24', 'id_25', 'id_26', 'id_32']
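# Release dates of the OS versions that appear in id_30; keys are lowercased
# below so lookups can be case-insensitive.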
id_30_dates = {
'Android 4.4.2':'2012-11-13','Android 5.0':'2014-11-12','Android 5.0.2':'2014-12-19',
'Android 5.1.1':'2015-04-21','Android 6.0':'2015-10-05','Android 6.0.1':'2015-12-07',
'Android 7.0':'2016-08-22','Android 7.1.1':'2016-12-05','Android 7.1.2':'2017-04-04',
'Android 8.0.0':'2017-08-21','Android 8.1.0':'2017-12-05','Android 9':'2018-08-06',
#Windows
'Windows XP':'2001-10-25','Windows Vista':'2006-11-08','Windows 7':'2009-10-22',
'Windows 8':'2012-10-26','Windows 8.1':'2013-10-17','Windows 10':'2015-07-29',
#MacOS
'Mac OS X 10.6': '2009-08-28','Mac OS X 10_6_8': '2011-06-23','Mac OS X 10_7_5': '2012-09-19',
'Mac OS X 10_8_5': '2013-09-12','Mac OS X 10.9': '2013-10-22','Mac OS X 10_9_5': '2014-09-17',
'Mac OS X 10.10': '2014-10-16','Mac OS X 10_10_5': '2015-08-13','Mac OS X 10.11': '2015-09-30',
'Mac OS X 10_11_3': '2016-01-19','Mac OS X 10_11_4': '2016-03-20','Mac OS X 10_11_5': '2016-05-16',
'Mac OS X 10_11_6': '2016-07-18','Mac OS X 10.12': '2016-09-20','Mac OS X 10_12': '2016-09-20',
'Mac OS X 10_12_1': '2016-10-24','Mac OS X 10_12_2': '2016-12-13','Mac OS X 10_12_3': '2017-01-23',
'Mac OS X 10_12_4': '2017-03-27','Mac OS X 10_12_5': '2017-05-15','Mac OS X 10_12_6': '2017-07-19',
'Mac OS X 10.13': '2017-09-25','Mac OS X 10_13_1': '2017-10-31','Mac OS X 10_13_2': '2017-12-06',
'Mac OS X 10_13_3': '2018-01-23','Mac OS X 10_13_4': '2018-03-29','Mac OS X 10_13_5': '2018-06-01',
'Mac OS X 10_13_6': '2018-07-09','Mac OS X 10.14': '2018-09-24','Mac OS X 10_14': '2018-09-24',
'Mac OS X 10_14_0': '2018-09-24','Mac OS X 10_14_1': '2018-10-30','Mac OS X 10_14_2': '2018-12-05',
#iOS
'iOS 9.3.5':'2016-08-25','iOS 10.0.2':'2016-09-23','iOS 10.1.1':'2016-10-31','iOS 10.2.0':'2016-12-12',
'iOS 10.2.1':'2017-01-23','iOS 10.3.1':'2017-04-03','iOS 10.3.2':'2017-05-15','iOS 10.3.3':'2017-07-19',
'iOS 11.0.0':'2017-08-19','iOS 11.0.1':'2017-08-26','iOS 11.0.2':'2017-10-03','iOS 11.0.3':'2017-10-11',
'iOS 11.1.0':'2017-10-31','iOS 11.1.1':'2017-11-08','iOS 11.1.2':'2017-11-16','iOS 11.2.0':'2017-12-02',
'iOS 11.2.1':'2017-12-13','iOS 11.2.2':'2018-01-08','iOS 11.2.5':'2018-01-23','iOS 11.2.6':'2018-02-19',
'iOS 11.3.0':'2018-03-29','iOS 11.3.1':'2018-04-24','iOS 11.4.0':'2018-05-29','iOS 11.4.1':'2018-07-09',
'iOS 12.0.0':'2018-08-17','iOS 12.0.1':'2018-09-08','iOS 12.1.0':'2018-09-30','iOS 12.1.1':'2018-12-05',
'iOS 12.1.2':'2018-12-20',
}
id_30_dates = {k.lower():v for k,v in id_30_dates.items()}
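# Hypothetical illustration (a sketch, not necessarily what this kernel does
# later): with id_30 lowercased, the release dates above could feed an
# "OS age at transaction time" feature, e.g.:
#   df['os_release'] = pd.to_datetime(df['id_30'].str.lower().map(id_30_dates))
#   df['os_age_days'] = (df['TransactionDate'] - df['os_release']).dt.days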
with faith('1. Loading Data Hold On ....'):
df_train_identity = pd.read_csv('../input/train_identity.csv', dtype=dict.fromkeys(FLOAT64_ICOLS, np.float32),)
df_test_identity = pd.read_csv('../input/test_identity.csv', dtype=dict.fromkeys(FLOAT64_ICOLS, np.float32),)
df_train_transaction = pd.read_csv('../input/train_transaction.csv', dtype=dict.fromkeys(FLOAT64_TCOLS, np.float32),)
df_test_transaction = pd.read_csv('../input/test_transaction.csv', dtype=dict.fromkeys(FLOAT64_TCOLS, np.float32),)
X_train = pd.merge(df_train_transaction, df_train_identity, how='left', on='TransactionID')
X_test = pd.merge(df_test_transaction, df_test_identity, how='left', on='TransactionID')
org_cols = X_train.columns.tolist()
print('Number of Training Examples = {}'.format(df_train_transaction.shape[0]))
print('Number of Test Examples = {}\n'.format(df_test_transaction.shape[0]))
print('Number of Training Examples with Identity = {}'.format(df_train_identity.shape[0]))
print('Number of Test Examples with Identity = {}\n'.format(df_test_identity.shape[0]))
print('Training X Shape = {}'.format(X_train.shape))
print('Training y Shape = {}'.format(X_train['isFraud'].shape))
print('Test X Shape = {}\n'.format(X_test.shape))
print('Training Set Memory Usage = {:.2f} MB'.format(X_train.memory_usage().sum() / 1024**2))
print('Test Set Memory Usage = {:.2f} MB\n'.format(X_test.memory_usage().sum() / 1024**2))
del df_train_identity, df_test_identity, df_train_transaction, df_test_transaction
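# (The per-table frames above are freed; only the merged X_train/X_test are used from here on.)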
with faith('2. Adding simple time feats like minute hour etc will be dropped later for sure'):
for df in tqdm([X_train, X_test]):
# TransactionDT converted to a timestamp
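# TransactionDT is an offset in seconds; subtracting 86400 anchors the first day of data at START_DATE.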
df['TransactionDate'] = (df['TransactionDT'] - 86400).apply(lambda x: (START_DATE + timedelta(seconds=x)))
# Time features for aggregation and grouping
df['Minute'] = df['TransactionDate'].dt.minute.values
df['Hour'] = df['TransactionDate'].dt.hour.values
df['Day'] = df['TransactionDate'].dt.day.values
df['DayOfWeek'] = df['TransactionDate'].dt.dayofweek.values
df['DayOfYear'] = df['TransactionDate'].dt.dayofyear.values
df['Week'] = df['TransactionDate'].dt.week.values
df['Month'] = df['TransactionDate'].dt.month.values
# D9 is Hour divided by 24, so this will fill the NaNs of D9
df['D9'] = df['Hour'] / 24
with faith('3. Fixing id_30 and DeviceInfo and inferring more vals for other cols etc...'):
for df in tqdm([X_train, X_test]):
########## DeviceInfo ##########
# Finding DeviceInfo from id_31
df.loc[df.query('DeviceInfo.isnull() and id_31.str.contains("mobile safari")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31.str.contains("for ios")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31.str.startswith("google search application")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "google"', engine='python').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari generic"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 9.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 10.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 11.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('DeviceInfo.isnull() and id_31 == "safari 12.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
########## DeviceType ##########
# Finding DeviceType from id_31
df.loc[df.query('DeviceType.isnull() and id_31 == "ie 11.0 for desktop"', engine='python').index, 'DeviceType'] = 'desktop'
df.loc[df.query('DeviceType.isnull() and id_31 == "chrome 65.0"', engine='python').index, 'DeviceType'] = 'desktop'
df.loc[df.query('DeviceType.isnull() and id_31 == "ie 11.0 for tablet"', engine='python').index, 'DeviceType'] = 'desktop'
# Finding DeviceType from DeviceInfo
df.loc[df.query('DeviceType.isnull() and ~DeviceInfo.isnull()', engine='python').index, 'DeviceType'] = 'desktop'
########## id_30 ##########
# Finding id_30 from DeviceInfo parsing errors
df.loc[df.query('id_30.isnull() and DeviceInfo == "Linux x86_64"', engine='python').index, 'id_30'] = 'Linux'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Linux i686"', engine='python').index, 'id_30'] = 'Linux'
df.loc[df.query('id_30.isnull() and DeviceInfo == "BOIE9"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "MDDRJS"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows NT 6.1"', engine='python').index, 'id_30'] = 'Windows 7'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows NT 6.2"', engine='python').index, 'id_30'] = 'Windows 8'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Microsoft"', engine='python').index, 'id_30'] = 'Windows 10'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Windows" and id_31.str.startswith("edge")', engine='python').index, 'id_30'] = 'Windows 10'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 5.1"', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 4.4.2"', engine='python').index, 'id_30'] = 'Android 4.4.2'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 5.1.1"', engine='python').index, 'id_30'] = 'Android 5.1.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 6.0.1"', engine='python').index, 'id_30'] = 'Android 6.0.1'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 6.0"', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 7.0"', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android"', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 7.1.2"', engine='python').index, 'id_30'] = 'Android 7.1.2'
df.loc[df.query('id_30.isnull() and DeviceInfo == "Android 8.0.0"', engine='python').index, 'id_30'] = 'Android 8.0.0'
# Finding id_30 from id_31 parsing errors
df.loc[df.query('id_30.isnull() and id_31 == "Generic/Android 7.0"', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and id_31.str.startswith("edge")', engine='python').index, 'id_30'] = 'Windows 10'
# Finding id_30 from Android Build Numbers
# Android devices without Build Numbers are labeled as Android
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Huawei")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/HUAWEI")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/S100")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Vision")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/HONOR")', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("Build/Lenovo")', engine='python').index, 'id_30'] = 'Android'
# Android devices with Build Numbers are mapped with their correct id_30 values
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("IML74K")', engine='python').index, 'id_30'] = 'Android 4.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JZO54K")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("6.2.A.1.100")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("9.8.2I-50_SML-25")', engine='python').index, 'id_30'] = 'Android 4.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JDQ39")', engine='python').index, 'id_30'] = 'Android 4.2'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("JLS36C")', engine='python').index, 'id_30'] = 'Android 4.3'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KTU84M")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KTU84P")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KOT49H")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KOT49I")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KVT49L")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXB21.14-L1.40")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXB20.9-1.10-1.24-1.1")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("KXC21.5-40")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("24.0.A.5.14")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("SU6-7.7")', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("26.1.A.3.111")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.1.A.1.81")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21R")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX22C")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21V")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21M")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21Y")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX21T")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LRX22G")', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPBS23.13-57-2")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPCS23.13-56-5")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPBS23.13-56-2")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPAS23.12-21.7-1")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LPCS23.13-34.8-3")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("E050L")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("L050U")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY48B")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47D")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47I")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47V")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LVY48F")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47O")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("LMY47X")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("10.7.A.0.222")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("14.6.A.0.368")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("14.6.A.1.236")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("18.6.A.0.182")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("19.4.A.0.182")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("29.1.A.0.101")', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-15.3-7")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.65-33-1-30")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("26.3.A.1.33")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.165")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.129")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("27.3.A.0.173")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("29.2.A.0.166")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("36.0.A.2.146")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.2.A.3.81")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.2.A.4.70")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("23.5.A.1.291")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("37.0.A.2.108")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("37.0.A.2.248")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("30.2.A.1.21")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("35.0.D.2.25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("M4B30Z")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPB24.65-34-3")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPI24.65-25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.107-52-11")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-33.1-2-10")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-33.1-2-16")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.65-25.1-19")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.107-55-2-17")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-2.35-1-17")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPIS24.241-15.3-26")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPI24.65-33.1-2")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MCG24.251-5-5")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPB24.65-34")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.107-52")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.65-25")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.107-52-5")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPDS24.65-33-1-3")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("IEXCNFN5902303111S")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MPD24.65-33")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MHC19Q")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB28B")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MOB30M")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29K")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MRA58K")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29M")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MMB29T")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("MXB48T")', engine='python').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90M")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90N")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NRD90U")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.3.A.1.97")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("33.3.A.1.115")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.2.A.2.47")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("36.1.A.1.86")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.175")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.135")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("40.0.A.6.189")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("42.0.A.4.101")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("42.0.A.4.167")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.5.79")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.25")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.70")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("43.0.A.7.55")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("HONORBLN-L24")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("HONORDLI-L22")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPHS25.200-15-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPHS25.200-23-1")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-4")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-15-11")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-93-14")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-82")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-72")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-15-11")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-33")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-72")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJ25.93-14.5")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-10")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJ25.93-14")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-15")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-93")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-93-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPKS25.200-17-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPKS25.200-12-9")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-15")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPP25.137-38")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJ25.93-14.7")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-13")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-35")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-15")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-92")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN25.137-82")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NCK25.118-10.5")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-93-4")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-93-12")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPPS25.137-93-14")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-18")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-8.1-4")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPJS25.93-14.7-8")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-10")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPNS25.137-92-14")', engine='python').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NJH47F")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("N6F27M")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMF26O")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMF26V")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMF26F")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("N2G47H")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMF26X")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMF26Q")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("N2G48C")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("32.4.A.1.54")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.3.A.0.252")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.3.A.0.228")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.3.A.0.238")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("41.2.A.7.76")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NDNS26.118-23-12-3")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NCQ26.69-56")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-19")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPN26.118-22-2")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPD26.48-24-1")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPSS26.118-19-14")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPS26.118-19")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-167")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-152")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-142")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-69")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-11-3")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-157")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NMA26.42-162")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPW26.83-42")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPW26.83-18-2-0-4")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NDQS26.69-64-2")', engine='python').index, 'id_30'] = 'Android 7.1'
| 0 |
c4c2912c76dea40a7fe36e070c679142bc0f0425
|
Python
|
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NDQS26.69-23-2-3")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPIS26.48-36-2")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPIS26.48-43-2")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPSS26.118-19-22")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPI26.48-36")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPIS26.48-36-5")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPIS26.48-38-3")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPLS26.118-20-5-3")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPSS26.118-19-6")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPS26.74-16-1")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPSS26.118-19-11")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPSS26.118-19-4")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NDSS26.118-23-19-6")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NDQ26.69-64-9")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("NPLS26.118-20-5-11")', engine='python').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.4.A.2.19")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("34.4.A.2.107")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("47.1.A.12.270")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("47.1.A.5.51")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("48.1.A.2.21")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("48.1.A.2.50")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("50.1.A.10.40")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("R16NW")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OCLS27.76-69-6")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR1.170623.032")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR5.170623.014")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR6.170623.013")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.61-14-4")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-25")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-87")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-140")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPS27.82-41")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPSS27.76-12-25-7")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPS27.76-12-25")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPSS27.82-87-3")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPS27.82-87")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPS27.82-72")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPSS27.76-12-25-3")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-143")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR1.170623.027")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPNS27.76-12-22-9")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPNS27.76-12-22-3")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPN27.76-12-22")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("O00623")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPW27.57-40")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPW27.113-89")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.113-25-4")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.57-40-14")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.57-40-17")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.113-89-2")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-72")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-122")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP27.91-146")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.57-40-6")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPWS27.57-40-22")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR1.170623.026")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPR4.170623.006")', engine='python').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM1.171019.012")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM3.171019.013")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM1.171019.011")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM1.171019.021")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM1.171019.019")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM1.171019.026")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM7.181005.003")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM2.171026.006.C1")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM4.171019.021.P1")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM2.171026.006.H1")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPM2.171026.006.G1")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPPS28.85-13-2")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPW28.70-22")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPS28.85-13")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPSS28.85-13-3")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("O11019")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPGS28.54-19-2")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPP28.85-13")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("OPG28.54-19")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("M1AJQ")', engine='python').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("PPR2.181005.003")', engine='python').index, 'id_30'] = 'Android 9.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("PKQ1.180716.001")', engine='python').index, 'id_30'] = 'Android 9.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("PPR1.180610.009")', engine='python').index, 'id_30'] = 'Android 9.0'
df.loc[df.query('id_30.isnull() and DeviceInfo.str.contains("PPR2.180905.005")', engine='python').index, 'id_30'] = 'Android 9.0'
########## id_31 ##########
# Recovering id_31 values that leaked into DeviceInfo through parsing errors
df.loc[df.query('id_31.isnull() and DeviceInfo == "rv:52.0"', engine='python').index, 'id_31'] = 'firefox 52.0'
########## id_32 ##########
# All iOS devices report a 32-bit color depth
df.loc[df.query('DeviceInfo == "iOS Device" and id_32.isnull()', engine='python').index, 'id_32'] = 32.0
with faith('4. Fixing UserAgent, id_31 etc and inferring more vals for other cols etc...v1.0') as f:
for df in tqdm([X_train, X_test]):
########## DeviceInfo ##########
# Fixing DeviceInfo from id_31
df.loc[df.query('DeviceInfo == "Windows" and id_31.str.contains("mobile safari")', engine='python').index, 'DeviceInfo'] = 'iOS Device'
# Creating a UserAgent feature from DeviceInfo
df['UserAgent'] = df['DeviceInfo'].copy()
# Fixing DeviceInfo from UserAgent
df.loc[df.query('UserAgent == "Intel" and id_30.str.contains("Mac")', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('UserAgent != "MacOS" and id_30.str.startswith("Mac OS")', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('UserAgent == "CPU"').index, 'DeviceInfo'] = 'iOS Device'
df.loc[df.query('UserAgent == "Windows NT 6.1"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Windows NT 6.2"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "MDDRJS"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Microsoft"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Trident/7.0"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Touch"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "S.N.O.W.4"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "BOIE9"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "rv:11.0"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "WOW64"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "ATT-IE11"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "MAMI"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "MALC"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "hp2015"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Northwell"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "xs-Z47b7VqTMxs"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "QwestIE8"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "ATT"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "NetHelper70"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "FunWebProducts"').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('UserAgent == "Lifesize"').index, 'DeviceInfo'] = 'Windows'
# Fixing DeviceInfo from id_30
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:27.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:31.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:37.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:38.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:39.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:42.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:43.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:44.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:45.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:46.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:47.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:48.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Mac") and UserAgent == "rv:48.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:49.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:50.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:51.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:52.9"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Mac OS") and UserAgent == "rv:57.0"', engine='python').index, 'DeviceInfo'] = 'MacOS'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:53.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:54.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:55.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:56.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:57.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:52.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:58.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:60.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:61.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:62.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:63.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
df.loc[df.query('id_30.str.startswith("Windows") and UserAgent == "rv:64.0"', engine='python').index, 'DeviceInfo'] = 'Windows'
# Incorrect DeviceInfo values that cannot be recovered are set to NaN
df.loc[df.query('DeviceInfo == "rv:14.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:21.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:27.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:28.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:29.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:31.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:33.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:35.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:37.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:38.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:39.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:41.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:42.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:43.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:44.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:45.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:46.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:47.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:48.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:49.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:50.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:51.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:52.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:52.9"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:53.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:54.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:55.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:56.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:57.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:58.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:59.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:60.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:60.1.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:61.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:62.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:63.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:64.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "rv:65.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "en-us"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "es-us"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "es-es"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "es-mx"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "en-gb"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Linux"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 4.4.2"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 5.1"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 6.0.1"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 6.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 7.0"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 7.1.2"').index, 'DeviceInfo'] = np.nan
df.loc[df.query('DeviceInfo == "Android 8.0.0"').index, 'DeviceInfo'] = np.nan
########## DeviceType ##########
# Fixing DeviceType from UserAgent
df.loc[df.query('UserAgent == "Android 4.4.2" and DeviceType == "desktop"').index, 'DeviceType'] = 'mobile'
# Fixing DeviceType from DeviceInfo
df.loc[df.query('DeviceInfo.str.contains("Build") and DeviceType == "desktop"', engine='python').index, 'DeviceType'] = 'mobile'
########## id_27 ##########
# id_27 flags whether id_23 was found: it is "Found" when id_23 is not NaN, and NaN otherwise
# "NotFound" is not a valid value, so those rows are corrected to "Found"
df.loc[df.query('id_27 == "NotFound"').index, 'id_27'] = 'Found'
########## id_30 ##########
# Fixing id_30 from DeviceInfo (Android Build Number)
df.loc[df.query('DeviceInfo == "LG-TP260 Build/NRD90U" and id_30 == "func"').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('DeviceInfo.str.contains("IML74K") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 4.0'
df.loc[df.query('DeviceInfo.str.contains("JDQ39") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 4.2'
df.loc[df.query('DeviceInfo.str.contains("KTU84M") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('DeviceInfo.str.contains("KTU84P") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('DeviceInfo.str.contains("SU6-7.7") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('DeviceInfo.str.contains("LRX22C") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('DeviceInfo.str.contains("LMY47D") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('DeviceInfo.str.contains("LMY47O") and id_30 == "Android"', engine='python').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('DeviceInfo.str.contains("OPM2.171026.006.G1") and id_30 == "func"', engine='python').index, 'id_30'] = 'Android 8.1'
# Fixing id_30 from UserAgent
df.loc[df.query('UserAgent == "Microsoft" and id_30 == "Windows"').index, 'id_30'] = 'Windows 10'
df.loc[df.query('UserAgent == "MDDRJS" and id_30 == "Windows 10"').index, 'id_30'] = 'Windows 7'
df.loc[df.query('UserAgent == "Linux"').index, 'id_30'] = 'Linux'
df.loc[df.query('UserAgent == "rv:51.0" and id_30 == "Linux"').index, 'id_30'] = 'Android 7.0'
df.loc[df.query('UserAgent == "Android" and id_30 == "Windows"').index, 'id_30'] = 'Android'
# Fixing id_30 from id_31
df.loc[df.query('id_31.str.startswith("edge") and id_30 != "Windows 10"', engine='python').index, 'id_30'] = 'Windows 10'
# Incorrect id_30 values that cannot be recovered are set to NaN
df.loc[df.query('id_31 == "safari" and id_30 == "Android"').index, 'id_30'] = np.nan
df.loc[df.query('DeviceInfo.isnull() and id_30 == "other"', engine='python').index, 'id_30'] = np.nan
df.loc[df.query('DeviceInfo.isnull() and id_30 == "func"', engine='python').index, 'id_30'] = np.nan
# Fixing "other" and "func" id_30 values
df.loc[df.query('DeviceInfo == "Windows" and id_30 == "other"').index, 'id_30'] = 'Windows'
df.loc[df.query('DeviceInfo == "iOS Device" and id_30 == "other"').index, 'id_30'] = 'iOS'
df.loc[df.query('DeviceInfo == "Windows" and id_30 == "func"').index, 'id_30'] = 'Windows'
df.loc[df.query('DeviceInfo == "iOS Device" and id_30 == "func"').index, 'id_30'] = 'iOS'
df.loc[df.query('DeviceInfo == "MacOS" and id_30 == "func"').index, 'id_30'] = 'Mac'
# Grouping Android versions
df.loc[df.query('id_30 == "Android 4.4.2"').index, 'id_30'] = 'Android 4.4'
df.loc[df.query('id_30 == "Android 5.0" or id_30 == "Android 5.0.2"').index, 'id_30'] = 'Android 5.0'
df.loc[df.query('id_30 == "Android 5.1" or id_30 == "Android 5.1.1"').index, 'id_30'] = 'Android 5.1'
df.loc[df.query('id_30 == "Android 6.0" or id_30 == "Android 6.0.1"').index, 'id_30'] = 'Android 6.0'
df.loc[df.query('id_30 == "Android 7.1.1" or id_30 == "Android 7.1.2"').index, 'id_30'] = 'Android 7.1'
df.loc[df.query('id_30 == "Android 8.0.0"').index, 'id_30'] = 'Android 8.0'
df.loc[df.query('id_30 == "Android 8.1.0"').index, 'id_30'] = 'Android 8.1'
df.loc[df.query('id_30 == "Android 9"').index, 'id_30'] = 'Android 9.0'
# Grouping Mac OS X versions
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_14") or id_30.str.startswith("Mac OS X 10.14"))', engine='python').index, 'id_30'] = 'Mac OS X 10.14'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_13") or id_30.str.startswith("Mac OS X 10.13"))', engine='python').index, 'id_30'] = 'Mac OS X 10.13'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_12") or id_30.str.startswith("Mac OS X 10.12"))', engine='python').index, 'id_30'] = 'Mac OS X 10.12'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_11") or id_30.str.startswith("Mac OS X 10.11"))', engine='python').index, 'id_30'] = 'Mac OS X 10.11'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_10") or id_30.str.startswith("Mac OS X 10.10"))', engine='python').index, 'id_30'] = 'Mac OS X 10.10'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_9") or id_30.str.startswith("Mac OS X 10.9"))', engine='python').index, 'id_30'] = 'Mac OS X 10.9'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_8") or id_30.str.startswith("Mac OS X 10.8"))', engine='python').index, 'id_30'] = 'Mac OS X 10.8'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_7") or id_30.str.startswith("Mac OS X 10.7"))', engine='python').index, 'id_30'] = 'Mac OS X 10.7'
df.loc[df.query('~id_30.isnull() and (id_30.str.startswith("Mac OS X 10_6") or id_30.str.startswith("Mac OS X 10.6"))', engine='python').index, 'id_30'] = 'Mac OS X 10.6'
########## id_31 ##########
# Fixing id_31 from UserAgent
df.loc[df.query('UserAgent == "rv:14.0" and id_31 == "Mozilla/Firefox"').index, 'id_31'] = 'firefox 14.0'
df.loc[df.query('UserAgent == "rv:21.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 21.0'
df.loc[df.query('UserAgent == "rv:27.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 27.0'
df.loc[df.query('UserAgent == "rv:28.0" and id_31.isnull()', engine='python').index, 'id_31'] = 'firefox 28.0'
df.loc[df.query('UserAgent == "rv:29.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 29.0'
df.loc[df.query('UserAgent == "rv:31.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 31.0'
df.loc[df.query('UserAgent == "rv:33.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 33.0'
df.loc[df.query('UserAgent == "rv:35.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 35.0'
df.loc[df.query('UserAgent == "rv:37.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 37.0'
df.loc[df.query('UserAgent == "rv:38.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 38.0'
df.loc[df.query('UserAgent == "rv:39.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 39.0'
df.loc[df.query('UserAgent == "rv:41.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 41.0'
df.loc[df.query('UserAgent == "rv:42.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 42.0'
df.loc[df.query('UserAgent == "rv:43.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 43.0'
df.loc[df.query('UserAgent == "rv:44.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 44.0'
df.loc[df.query('UserAgent == "rv:45.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 45.0'
df.loc[df.query('UserAgent == "rv:46.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 46.0'
df.loc[df.query('UserAgent == "rv:48.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 48.0'
df.loc[df.query('UserAgent == "rv:49.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 49.0'
df.loc[df.query('UserAgent == "rv:50.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 50.0'
df.loc[df.query('UserAgent == "rv:51.0" and id_31 == "Generic/Android 7.0"').index, 'id_31'] = 'firefox 51.0'
df.loc[df.query('UserAgent == "rv:51.0" and id_31 == "seamonkey"').index, 'id_31'] = 'firefox 51.0'
df.loc[df.query('UserAgent == "rv:51.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 51.0'
df.loc[df.query('UserAgent == "rv:52.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 52.0'
df.loc[df.query('UserAgent == "rv:52.9" and id_31 == "firefox"').index, 'id_31'] = 'other'
df.loc[df.query('UserAgent == "rv:52.9" and id_31 == "palemoon"').index, 'id_31'] = 'other'
df.loc[df.query('UserAgent == "rv:53.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 53.0'
df.loc[df.query('UserAgent == "rv:53.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 53.0'
df.loc[df.query('UserAgent == "rv:54.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 54.0'
df.loc[df.query('UserAgent == "rv:55.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 55.0'
df.loc[df.query('UserAgent == "rv:55.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 55.0'
df.loc[df.query('UserAgent == "rv:57.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 57.0'
df.loc[df.query('UserAgent == "rv:56.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 56.0'
df.loc[df.query('UserAgent == "rv:56.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 56.0'
df.loc[df.query('UserAgent == "rv:57.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 57.0'
df.loc[df.query('UserAgent == "rv:57.0" and id_31 == "Generic/Android 7.0"').index, 'id_31'] = 'firefox 57.0'
df.loc[df.query('UserAgent == "rv:58.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 58.0'
df.loc[df.query('UserAgent == "rv:58.0" and id_31 == "Generic/Android 7.0"').index, 'id_31'] = 'firefox 58.0'
df.loc[df.query('UserAgent == "rv:58.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 58.0'
df.loc[df.query('UserAgent == "rv:58.0" and id_31 == "firefox generic"').index, 'id_31'] = 'firefox 58.0'
df.loc[df.query('UserAgent == "rv:59.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 59.0'
df.loc[df.query('UserAgent == "rv:59.0" and id_31 == "firefox generic"').index, 'id_31'] = 'firefox 59.0'
df.loc[df.query('UserAgent == "rv:59.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 59.0'
df.loc[df.query('UserAgent == "rv:60.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 60.0'
df.loc[df.query('UserAgent == "rv:60.0" and id_31 == "firefox generic"').index, 'id_31'] = 'firefox 60.0'
df.loc[df.query('UserAgent == "rv:60.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 60.0'
df.loc[df.query('UserAgent == "rv:61.0" and id_31 == "Generic/Android"').index, 'id_31'] = 'firefox 61.0'
df.loc[df.query('UserAgent == "rv:61.0" and id_31 == "firefox mobile 61.0"').index, 'id_31'] = 'firefox 61.0'
df.loc[df.query('UserAgent == "rv:62.0" and id_31 == "firefox mobile 62.0"').index, 'id_31'] = 'firefox 62.0'
df.loc[df.query('UserAgent == "rv:63.0" and id_31 == "firefox mobile 63.0"').index, 'id_31'] = 'firefox 63.0'
df.loc[df.query('UserAgent == "rv:64.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 64.0'
df.loc[df.query('UserAgent == "rv:65.0" and id_31 == "firefox"').index, 'id_31'] = 'firefox 65.0'
# Fixing id_31 from id_30
# Safari ships with iOS and its major version tracks the iOS version, so Safari versions can be inferred from iOS versions
df.loc[df.query('id_30.str.startswith("iOS 9") and id_31 == "mobile safari uiwebview"', engine='python').index, 'id_31'] = 'safari 9.0'
df.loc[df.query('id_30.str.startswith("iOS 10") and id_31 == "mobile safari uiwebview"', engine='python').index, 'id_31'] = 'safari 10.0'
df.loc[df.query('id_30.str.startswith("iOS 11") and id_31 == "mobile safari uiwebview"', engine='python').index, 'id_31'] = 'safari 11.0'
# Grouping mobile and desktop safari browsers
df.loc[df.query('id_31 == "mobile safari generic" or id_31 == "safari generic"').index, 'id_31'] = 'safari 0.0'
df.loc[df.query('id_31 == "mobile safari 8.0" or id_31 == "safari 8.0"').index, 'id_31'] = 'safari 8.0'
df.loc[df.query('id_31 == "mobile safari 9.0" or id_31 == "safari 9.0"').index, 'id_31'] = 'safari 9.0'
df.loc[df.query('id_31 == "mobile safari 10.0" or id_31 == "safari 10.0"').index, 'id_31'] = 'safari 10.0'
df.loc[df.query('id_31 == "mobile safari 11.0" or id_31 == "safari 11.0"').index, 'id_31'] = 'safari 11.0'
df.loc[df.query('id_31 == "mobile safari 12.0" or id_31 == "safari 12.0"').index, 'id_31'] = 'safari 12.0'
# Grouping mobile and desktop chrome browsers
df.loc[df.query('id_31 == "chrome 39.0 for android"').index, 'id_31'] = 'chrome 39.0'
df.loc[df.query('id_31 == "chrome 43.0 for android"').index, 'id_31'] = 'chrome 43.0'
df.loc[df.query('id_31 == "chrome 46.0 for android"').index, 'id_31'] = 'chrome 46.0'
df.loc[df.query('id_31 == "google search application 48.0"').index, 'id_31'] = 'chrome 48.0'
df.loc[df.query('id_31 == "chrome 49.0 for android" or id_31 == "chrome 49.0" or id_31 == "google search application 49.0"').index, 'id_31'] = 'chrome 49.0'
df.loc[df.query('id_31 == "chrome 50.0 for android"').index, 'id_31'] = 'chrome 50.0'
df.loc[df.query('id_31 == "chrome 51.0 for android" or id_31 == "chrome 51.0"').index, 'id_31'] = 'chrome 51.0'
df.loc[df.query('id_31 == "chrome 52.0 for android" or id_31 == "google search application 52.0"').index, 'id_31'] = 'chrome 52.0'
df.loc[df.query('id_31 == "chrome 53.0 for android"').index, 'id_31'] = 'chrome 53.0'
df.loc[df.query('id_31 == "chrome 54.0 for android" or id_31 == "google search application 54.0"').index, 'id_31'] = 'chrome 54.0'
df.loc[df.query('id_31 == "chrome 55.0 for android" or id_31 == "chrome 55.0"').index, 'id_31'] = 'chrome 55.0'
df.loc[df.query('id_31 == "chrome 56.0 for android" or id_31 == "chrome 56.0" or id_31 == "google search application 56.0"').index, 'id_31'] = 'chrome 56.0'
df.loc[df.query('id_31 == "chrome 57.0 for android" or id_31 == "chrome 57.0"').index, 'id_31'] = 'chrome 57.0'
df.loc[df.query('id_31 == "chrome 58.0 for android" or id_31 == "chrome 58.0" or id_31 == "google search application 58.0"').index, 'id_31'] = 'chrome 58.0'
df.loc[df.query('id_31 == "chrome 59.0 for android" or id_31 == "chrome 59.0" or id_31 == "google search application 59.0"').index, 'id_31'] = 'chrome 59.0'
df.loc[df.query('id_31 == "chrome 60.0 for android" or id_31 == "chrome 60.0" or id_31 == "google search application 60.0"').index, 'id_31'] = 'chrome 60.0'
df.loc[df.query('id_31 == "chrome 61.0 for android" or id_31 == "chrome 61.0" or id_31 ==
| 1 |
c4c2912c76dea40a7fe36e070c679142bc0f0425
|
Python
|
"google search application 61.0"').index, 'id_31'] = 'chrome 61.0'
df.loc[df.query('id_31 == "chrome 62.0 for android" or id_31 == "chrome 62.0 for ios" or id_31 == "chrome 62.0" or id_31 == "google search application 62.0"').index, 'id_31'] = 'chrome 62.0'
df.loc[df.query('id_31 == "chrome 63.0 for android" or id_31 == "chrome 63.0 for ios" or id_31 == "chrome 63.0" or id_31 == "google search application 63.0"').index, 'id_31'] = 'chrome 63.0'
df.loc[df.query('id_31 == "chrome 64.0 for android" or id_31 == "chrome 64.0 for ios" or id_31 == "chrome 64.0" or id_31 == "google search application 64.0"').index, 'id_31'] = 'chrome 64.0'
df.loc[df.query('id_31 == "chrome 65.0 for android" or id_31 == "chrome 65.0 for ios" or id_31 == "chrome 65.0" or id_31 == "google search application 65.0"').index, 'id_31'] = 'chrome 65.0'
df.loc[df.query('id_31 == "chrome 66.0 for android" or id_31 == "chrome 66.0 for ios" or id_31 == "chrome 66.0"').index, 'id_31'] = 'chrome 66.0'
df.loc[df.query('id_31 == "chrome 67.0 for android" or id_31 == "chrome 67.0 for ios" or id_31 == "chrome 67.0"').index, 'id_31'] = 'chrome 67.0'
df.loc[df.query('id_31 == "chrome 68.0 for android" or id_31 == "chrome 68.0 for ios" or id_31 == "chrome 68.0"').index, 'id_31'] = 'chrome 68.0'
df.loc[df.query('id_31 == "chrome 69.0 for android" or id_31 == "chrome 69.0 for ios" or id_31 == "chrome 69.0"').index, 'id_31'] = 'chrome 69.0'
df.loc[df.query('id_31 == "chrome 70.0 for android" or id_31 == "chrome 70.0 for ios" or id_31 == "chrome 70.0"').index, 'id_31'] = 'chrome 70.0'
df.loc[df.query('id_31 == "chrome 71.0 for android" or id_31 == "chrome 71.0 for ios" or id_31 == "chrome 71.0"').index, 'id_31'] = 'chrome 71.0'
# Grouping mobile and desktop firefox browsers
df.loc[df.query('id_31 == "firefox mobile 61.0" or id_31 == "firefox 61.0"').index, 'id_31'] = 'firefox 61.0'
df.loc[df.query('id_31 == "firefox mobile 62.0" or id_31 == "firefox 62.0"').index, 'id_31'] = 'firefox 62.0'
df.loc[df.query('id_31 == "firefox mobile 63.0" or id_31 == "firefox 63.0"').index, 'id_31'] = 'firefox 63.0'
# Grouping other id_31 values
df.loc[df.query('id_31 == "Samsung/SM-G532M"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Samsung/SM-G531H"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Generic/Android 7.0"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "palemoon"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "cyberfox"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "android"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Cherry"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "M4Tel/M4"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Samsung/SCH"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "chromium"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "BLU/Dash"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Nokia/Lumia"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "LG/K-200"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "iron"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Inco/Minion"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "waterfox"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "facebook"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "puffin"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Lanix/Ilium"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "icedragon"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "aol"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "comodo"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "line"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "maxthon"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "ZTE/Blade"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "mobile"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "silk"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "Microsoft/Windows"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "rim"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "blackberry"').index, 'id_31'] = 'other'
df.loc[df.query('id_31 == "uc"').index, 'id_31'] = 'other'
########## UserAgent ##########
# Grouping rv:60 UserAgent values
df.loc[df.query('UserAgent == "rv:60.1.0"').index, 'UserAgent'] = 'rv:60.0'
# Remove leftover DeviceInfo strings from UserAgent, keeping only genuine user-agent tokens
df.loc[df.query('~UserAgent.isin(@USER_AGENTS) and ~UserAgent.isnull()', engine='python').index, 'UserAgent'] = np.nan
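# The '@' prefix above pulls USER_AGENTS from the surrounding Python scope
# (df.query's variable-reference syntax); it is a whitelist of legitimate
# UserAgent tokens defined earlier in this kernel -- anything outside it is a
# leftover DeviceInfo string and is nulled out.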
########## id_32 ##########
# A color depth of 0.0 is invalid; replace it with the mode value (24.0)
df.loc[df.query('id_32 == 0.0 and UserAgent == "rv:59.0"').index, 'id_32'] = 24.0
with faith('5. Fixing UserAgent, id_31 etc and inferring more vals for other cols etc... v2.0, Yes it\'s twice') as f:
for df in tqdm([X_train, X_test]):
# DeviceInfo
df.loc[df.query('DeviceInfo.str.contains("SM-J700M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-J700M'
df.loc[df.query('DeviceInfo.str.contains("SM-G610M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G610M'
df.loc[df.query('DeviceInfo.str.contains("SM-G531H") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G531H'
df.loc[df.query('DeviceInfo.str.contains("SM-G935F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G935F'
df.loc[df.query('DeviceInfo.str.contains("SM-G955U") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G955U'
df.loc[df.query('DeviceInfo.str.contains("SM-G532M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G532M'
df.loc[df.query('DeviceInfo.str.contains("ALE") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-ALE'
df.loc[df.query('DeviceInfo.str.contains("SM-G950U") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G950U'
df.loc[df.query('DeviceInfo.str.contains("SM-G930V") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G930V'
df.loc[df.query('DeviceInfo.str.contains("SM-G950F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G950F'
df.loc[df.query('DeviceInfo.str.contains("Moto G \(4\)") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-G4'
df.loc[df.query('DeviceInfo.str.contains("SM-N950U") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-N950U'
df.loc[df.query('DeviceInfo.str.contains("SM-A300H") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-A300H'
df.loc[df.query('DeviceInfo.str.contains("hi6210sft") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-hi6210sft'
df.loc[df.query('DeviceInfo.str.contains("SM-J730GM") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-J730GM'
df.loc[df.query('DeviceInfo.str.contains("SM-G570M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G570M'
df.loc[df.query('DeviceInfo.str.contains("CAM-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-CAM'
df.loc[df.query('DeviceInfo.str.contains("SM-J320M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-J320M'
df.loc[df.query('DeviceInfo.str.contains("Moto E \(4\) Plus") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-E4-Plus'
df.loc[df.query('DeviceInfo.str.contains("Moto E \(4\)") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-E4'
df.loc[df.query('DeviceInfo.str.contains("LG-M700") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'LG-M700'
df.loc[df.query('DeviceInfo.str.contains("ANE") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-ANE'
df.loc[df.query('DeviceInfo.str.contains("SM-J510MN") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-J510MN'
df.loc[df.query('DeviceInfo.str.contains("SM-J701M") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-J701M'
df.loc[df.query('DeviceInfo.str.contains("LG-D693n") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'LG-D693n'
df.loc[df.query('DeviceInfo.str.contains("SM-A520F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-A520F'
df.loc[df.query('DeviceInfo.str.contains("SM-G930F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G930F'
df.loc[df.query('DeviceInfo.str.contains("SM-G935V") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G935V'
df.loc[df.query('DeviceInfo.str.contains("LG-K410") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'LG-K410'
df.loc[df.query('DeviceInfo.str.contains("PRA-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-PRA'
df.loc[df.query('DeviceInfo.str.contains("SM-G955F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-G955F'
df.loc[df.query('DeviceInfo.str.contains("Moto G \(5\) Plus") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-G5-Plus'
df.loc[df.query('DeviceInfo.str.contains("Moto G \(5\)") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-G5'
df.loc[df.query('DeviceInfo.str.contains("Moto Z2") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-Z2-Play'
df.loc[df.query('DeviceInfo.str.contains("TRT-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-TRT'
df.loc[df.query('DeviceInfo.str.contains("Moto G Play") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-G-Play'
df.loc[df.query('DeviceInfo.str.contains("SM-A720F") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Samsung-A720F'
df.loc[df.query('DeviceInfo.str.contains("LG-K580") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'LG-K580'
df.loc[df.query('DeviceInfo.str.contains("TAG-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-TAG'
df.loc[df.query('DeviceInfo.str.contains("VNS-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-VNS'
df.loc[df.query('DeviceInfo.str.contains("Moto X Play") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Motorola-Moto-X-Play'
df.loc[df.query('DeviceInfo.str.contains("LG-X230") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'LG-X230'
df.loc[df.query('DeviceInfo.str.contains("WAS-") and ~DeviceInfo.isnull()', engine='python').index, 'DeviceInfo'] = 'Huawei-WAS'
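# NOTE: a minimal sketch (hypothetical, not executed) of how the repeated
# substring -> canonical-name assignments above could be driven from one dict:
#   DEVICE_MAP = {'SM-J700M': 'Samsung-J700M', 'ALE-': 'Huawei-ALE'}  # etc.
#   for pattern, name in DEVICE_MAP.items():
#       mask = df['DeviceInfo'].str.contains(pattern, na=False, regex=False)
#       df.loc[mask, 'DeviceInfo'] = name
# (patterns like 'Moto E \(4\)' would need regex=True)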
# id_30
df.loc[df.query('DeviceInfo == "Samsung-J700M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G610M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G531H" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G935F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G955U" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G532M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-ALE" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G950U" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G930V" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G950F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G4" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-N950U" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-A300H" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-hi6210sft" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-J730GM" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G570M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-CAM" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-J320M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-E4-Plus" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-E4" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "LG-M700" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-ANE" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-J510MN" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-J701M" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "LG-D693n" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-A520F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G930F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G935V" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "LG-K410" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-PRA" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-G955F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G5-Plus" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G5" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-Z2-Play" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-TRT" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G-Play" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Samsung-A720F" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "LG-K580" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-TAG" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-VNS" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Motorola-Moto-X-Play" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "LG-X230" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
df.loc[df.query('DeviceInfo == "Huawei-WAS" and id_30.isnull()', engine='python').index, 'id_30'] = 'Android'
# id_33
df.loc[df.query('DeviceInfo == "Samsung-J700M"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-G610M"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Samsung-G531H"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-G935F"').index, 'id_33'] = '2560x1440'
df.loc[df.query('DeviceInfo == "Samsung-G955U"').index, 'id_33'] = '2960x1440'
df.loc[df.query('DeviceInfo == "Samsung-G532M"').index, 'id_33'] = '960x540'
df.loc[df.query('DeviceInfo == "Huawei-ALE"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-G950U"').index, 'id_33'] = '2960x1440'
df.loc[df.query('DeviceInfo == "Samsung-G930V"').index, 'id_33'] = '2560x1440'
df.loc[df.query('DeviceInfo == "Samsung-G950F"').index, 'id_33'] = '2960x1440'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G4"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Samsung-N950U"').index, 'id_33'] = '2960x1440'
df.loc[df.query('DeviceInfo == "Samsung-A300H"').index, 'id_33'] = '960x540'
df.loc[df.query('DeviceInfo == "Huawei-hi6210sft"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-J730GM"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Samsung-G570M"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Huawei-CAM"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-J320M"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Motorola-Moto-E4-Plus"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Motorola-Moto-E4"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "LG-M700"').index, 'id_33'] = '2880x1440'
df.loc[df.query('DeviceInfo == "Huawei-ANE"').index, 'id_33'] = '2280x1080'
df.loc[df.query('DeviceInfo == "Samsung-J510MN"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Samsung-J701M"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "LG-D693n"').index, 'id_33'] = '960x540'
df.loc[df.query('DeviceInfo == "Samsung-A520F"').index, 'id_33'] = '960x540'
df.loc[df.query('DeviceInfo == "Samsung-G930F"').index, 'id_33'] = '2560x1440'
df.loc[df.query('DeviceInfo == "Samsung-G935V"').index, 'id_33'] = '2560x1440'
df.loc[df.query('DeviceInfo == "LG-K410"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Huawei-PRA"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Samsung-G955F"').index, 'id_33'] = '2960x1440'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G5-Plus"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G5"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Motorola-Moto-Z2-Play"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Huawei-TRT"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Motorola-Moto-G-Play"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Samsung-A720F"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "LG-K580"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "Huawei-TAG"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Huawei-VNS"').index, 'id_33'] = '1280x720'
df.loc[df.query('DeviceInfo == "Motorola-Moto-X-Play"').index, 'id_33'] = '1920x1080'
df.loc[df.query('DeviceInfo == "LG-X230"').index, 'id_33'] = '854x480'
df.loc[df.query('DeviceInfo == "Huawei-WAS"').index, 'id_33'] = '1920x1080'
D_COLS = [f'D{i}' for i in range(1, 16) if i != 9]
C_COLS = [f'C{i}' for i in range(1, 15)]
CN_COLS = [f'C{i}_Week_Norm' for i in range(1, 15)]
with faith('6. Creating ID cols using card cols and adding C, D, V block norm feats etc') as f:
for df in tqdm([X_train, X_test]):
# Feature Combination
df['Card_ID1'] = df['card1'].astype(str) + '_' + df['card2'].astype(str)
df['Card_ID2'] = df['Card_ID1'].astype(str) + '_' + df['card3'].astype(str) + '_' + df['card5'].astype(str)
df['Card_ID3'] = df['Card_ID2'].astype(str) + '_' + df['addr1'].astype(str) + '_' + df['addr2'].astype(str)
df['Item_ID'] = df['TransactionAmt'].astype(str) + '_' + df['ProductCD'].astype(str)
#df['Device_ID'] = df['DeviceInfo'].astype(str) + '_' + df['DeviceType'].astype(str) + '_' + df['id_30'].astype(str) + '_' + df['id_31'].astype(str)
df['PAccount_ID'] = df['addr1'].astype(str) + '_' + df['addr2'].astype(str) + '_' + df['P_emaildomain'].astype(str)
df['RAccount_ID'] = df['addr1'].astype(str) + '_' + df['addr2'].astype(str) + '_' + df['R_emaildomain'].astype(str)
df['PR_emaildomain'] = df['P_emaildomain'].astype(str) + '_' + df['R_emaildomain'].astype(str)
# D unique count
df['D_Uniques'] = df[D_COLS].nunique(axis=1)
# D Normalized
for d in D_COLS:
df[f'{d}_Week_Norm'] = df[d] - df['Week'].map(
pd.concat([X_train[[d, 'Week']], X_test[[d, 'Week']]], ignore_index=True).groupby('Week')[d].mean()
)
# V-Block Aggregation
for block in [(1, 12), (12, 35), (35, 53), (53, 75), (75, 95), (95, 138), (138, 167), (167, 217), (217, 279), (279, 322), (322, 340)]:
df['V{}-V{}_Sum'.format(*block)] = df[['V{}'.format(i) for i in range(*block)]].sum(axis=1)
df['V{}-V{}_Mean'.format(*block)] = df[['V{}'.format(i) for i in range(*block)]].mean(axis=1)
df['V{}-V{}_Std'.format(*block)] = df[['V{}'.format(i) for i in range(*block)]].std(axis=1)
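# Worked toy example of the D-column week normalization above: with
# D1 = [2, 4, 10] and Week = [1, 1, 2], the per-week means are {1: 3, 2: 10}, so
# D1_Week_Norm = [-1, 1, 0]; the means are computed over train+test combined, so
# both frames are de-trended against the same weekly baseline.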
# CONTINUOUS/CATEGORICAL GROUPING AGGREGATIONS
DN_COLS = [f'D{i}_Week_Norm' for i in range(1, 16) if i != 9]
CONT_COLS = ['TransactionAmt', 'dist1', 'dist2'] + C_COLS + DN_COLS
CAT_COLS = ['card1', 'card2', 'card3', 'card5', 'Card_ID1', 'Card_ID2', 'Card_ID3', 'addr1', 'P_emaildomain',
'R_emaildomain', 'PAccount_ID', 'RAccount_ID', 'PR_emaildomain']
AGG_TYPES = ['std', 'mean', 'sum']
with faith('7. Adding various other agg feats') as f:
for cat_col in CAT_COLS:
for cont_col in CONT_COLS:
for agg_type in AGG_TYPES:
new_col_name = cat_col + f'_{cont_col}_' + agg_type.capitalize()
temp_df = pd.concat([X_train[[cat_col, cont_col]], X_test[[cat_col, cont_col]]])
temp_df = temp_df.groupby([cat_col])[cont_col].agg([agg_type]).reset_index().rename(columns={agg_type: new_col_name})
temp_df.index = list(temp_df[cat_col])
temp_df = temp_df[new_col_name].to_dict()
X_train[new_col_name] = X_train[cat_col].map(temp_df).astype(np.float32)
X_test[new_col_name] = X_test[cat_col].map(temp_df).astype(np.float32)
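# Sketch of what the loop above does for one (cat, cont, agg) triple
# (illustrative column names only):
#   combined = pd.concat([X_train[['card1', 'TransactionAmt']],
#                         X_test[['card1', 'TransactionAmt']]])
#   agg_map = combined.groupby('card1')['TransactionAmt'].mean()
#   X_train['card1_TransactionAmt_Mean'] = X_train['card1'].map(agg_map)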
# Not used due to memory issues; kept for reference:
'''
CONT_COLS = ['TransactionAmt', 'id_01', 'id_02']
TIME_COLS = ['Hour', 'Day', 'Week']
for df in [X_train, X_test]:
# Continuous - Continuous/Categorical group Mean/Median Difference
for cont_col in CONT_COLS:
for cat_col in CAT_COLS:
df['{}_({}Mean{})_Difference'.format(cont_col, cat_col, cont_col)] = df[cont_col] - df[cat_col].map(pd.concat([X_train[[cont_col, cat_col]], X_test[[cont_col, cat_col]]], ignore_index=True).groupby(cat_col)[cont_col].mean())
df['{}_({}Median{})_Difference'.format(cont_col, cat_col, cont_col)] = df[cont_col] - df[cat_col].map(pd.concat([X_train[[cont_col, cat_col]], X_test[[cont_col, cat_col]]], ignore_index=True).groupby(cat_col)[cont_col].median())
gc.collect()
# Time-based continuous aggregation
for cont_col in CONT_COLS:
for time_col in TIME_COLS:
df['{}_{}_Sum'.format(time_col, cont_col)] = df[time_col].map(pd.concat([X_train[[cont_col, time_col]], X_test[[cont_col, time_col]]], ignore_index=True).groupby(time_col)[cont_col].sum())
df['{}_{}_Count'.format(time_col, cont_col)] = df[time_col].map(pd.concat([X_train[[cont_col, time_col]], X_test[[cont_col, time_col]]], ignore_index=True).groupby(time_col)[cont_col].count())
df['{}_{}_Mean'.format(time_col, cont_col)] = df[time_col].map(pd.concat([X_train[[cont_col, time_col]], X_test[[cont_col, time_col]]], ignore_index=True).groupby(time_col)[cont_col].mean())
df['{}_{}_Std'.format(time_col, cont_col)] = df[time_col].map(pd.concat([X_train[[cont_col, time_col]], X_test[[cont_col, time_col]]], ignore_index=True).groupby(time_col)[cont_col].std())
gc.collect()
'''
with faith('8. Fixing UserAgent, id_31 etc and Adding cents col etc...') as f:
for df in tqdm([X_train, X_test]):
        # ParsingError: 1 if a raw UserAgent string remains (device parsing failed),
        # 0 if only DeviceInfo was parsed, NaN if both are missing
df['ParsingError'] = np.nan
df.loc[df.query('~DeviceInfo.isnull() or ~UserAgent.isnull()', engine='python').index, 'ParsingError'] = 0
df.loc[df.query('~UserAgent.isnull()', engine='python').index, 'ParsingError'] = 1
        # BrowserUpToDate: 1 while the reported browser version was still the newest
        # release (each cutoff is the next version's release date), else 0
df['BrowserUpToDate'] = np.nan
df.loc[df.query('~id_31.isnull()', engine='python').index, 'BrowserUpToDate'] = 0
df.loc[df.query('id_31 == "safari 10.0" and TransactionDate < "2017-09-19 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "safari 11.0" and TransactionDate < "2018-09-17 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "safari 12.0"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 62.0" and TransactionDate < "2017-12-05 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 63.0" and TransactionDate < "2018-01-24 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 64.0" and TransactionDate < "2018-03-06 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 65.0" and TransactionDate < "2018-04-17 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 66.0" and TransactionDate < "2018-05-29 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 67.0" and TransactionDate < "2018-07-24 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 68.0" and TransactionDate < "2018-09-04 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 69.0" and TransactionDate < "2018-10-16 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 70.0" and TransactionDate < "2018-12-04 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "chrome 71.0"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "edge 16.0" and TransactionDate < "2018-04-30 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "edge 17.0" and TransactionDate < "2018-11-13 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "edge 18.0"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 49.0" and TransactionDate < "2018-01-04 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 51.0" and TransactionDate < "2018-03-22 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 52.0" and TransactionDate < "2018-05-10 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 53.0" and TransactionDate < "2018-06-28 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 54.0" and TransactionDate < "2018-08-16 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 55.0" and TransactionDate < "2018-09-25 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "opera 56.0" and TransactionDate < "2018-11-28 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 6.2" and TransactionDate < "2018-02-19 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 6.4" and TransactionDate < "2018-06-07 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 7.0" and TransactionDate < "2018-07-07 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 7.2" and TransactionDate < "2018-08-19 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 7.4" and TransactionDate < "2018-12-21 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "samsung browser 8.2"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 56.0" and TransactionDate < "2017-11-14 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 57.0" and TransactionDate < "2018-01-23 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 58.0" and TransactionDate < "2018-03-13 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 59.0" and TransactionDate < "2018-05-09 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 60.0" and TransactionDate < "2018-06-26 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 61.0" and TransactionDate < "2018-09-05 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 62.0" and TransactionDate < "2018-10-23 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 63.0" and TransactionDate < "2018-12-11 00:00:00"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 64.0"').index, 'BrowserUpToDate'] = 1
df.loc[df.query('id_31 == "firefox 65.0"').index, 'BrowserUpToDate'] = 1
# TransactionAmtCents
df['TransactionAmtCents'] = df['TransactionAmt'] - df['TransactionAmt'].astype('int')
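        # e.g. TransactionAmt 31.95 -> TransactionAmtCents ~0.95: the fractional
        # (cents) part of the amount, up to float rounding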
with faith('9. Label + Frequency Encoding M and various ID cols') as f:
for df in tqdm([X_train, X_test]):
# Rounding
df['id_11'] = df['id_11'].round(2)
        # Casting: convert "WxH" resolution strings into a total pixel count
        res = df['id_33'].str.split('x', expand=True)
        df['id_33'] = res[0].astype(np.float32) * res[1].astype(np.float32)
# Label Encoding
df['M1'] = df['M1'].map({'F': 0, 'T': 1})
df['M2'] = df['M2'].map({'F': 0, 'T': 1})
df['M3'] = df['M3'].map({'F': 0, 'T': 1})
df['M4'] = df['M4'].map({'M0': 0, 'M1': 1, 'M2': 2})
df['M5'] = df['M5'].map({'F': 0, 'T': 1})
df['M6'] = df['M6'].map({'F': 0, 'T': 1})
df['M7'] = df['M7'].map({'F': 0, 'T': 1})
df['M8'] = df['M8'].map({'F': 0, 'T': 1})
df['M9'] = df['M9'].map({'F': 0, 'T': 1})
df['id_12'] = df['id_12'].map({'NotFound': 0, 'Found': 1})
df['id_15'] = df['id_15'].map({'Unknown': 0, 'New': 1, 'Found': 2})
df['id_16'] = df['id_16'].map({'NotFound': 0, 'Found': 1})
df['id_23'] = df['id_23'].map({'IP_PROXY:TRANSPARENT': 0, 'IP_PROXY:ANONYMOUS': 1, 'IP_PROXY:HIDDEN': 2})
df['id_27'] = df['id_27'].map({'Found': 1})
df['id_28'] = df['id_28'].map({'New': 0, 'Found': 1})
df['id_29'] = df['id_29'].map({'NotFound': 0, 'Found': 1})
df['id_34'] = df['id_34'].map({'match_status:-1': -1, 'match_status:0': 0, 'match_status:1': 1, 'match_status:2': 2})
df['id_35'] = df['id_35'].map({'F': 0, 'T': 1})
df['id_36'] = df['id_36'].map({'F': 0, 'T': 1})
df['id_37'] = df['id_37'].map({'F': 0, 'T': 1})
df['id_38'] = df['id_38'].map({'F': 0, 'T': 1})
# Frequency Encoding
for col in CAT_COLS + ['TransactionAmt', 'TransactionAmtCents']:
df[f'{col}_VC'] = df[col].map(pd.concat([X_train[col], X_test[col]], ignore_index=True).value_counts(dropna=False))
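        # e.g. if 'gmail.com' appears 1200 times across train+test combined, every
        # gmail.com row gets P_emaildomain_VC == 1200 (NaNs are counted as a group too)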
def build_ranges(ranges):
out = []
for arange in ranges:
out.append(np.arange(arange[0], arange[-1]+1, 1).tolist())
return sum(out, [])
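# e.g. build_ranges([(1, 3), (7, 9)]) -> [1, 2, 3, 7, 8, 9]
# (each (lo, hi) pair expands to the inclusive integer range lo..hi)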
# from SO [link TO-DO]
def sliding_window(iterable, size=2, step=1, fillvalue=None):
if size < 0 or step < 1:
raise ValueError
it = iter(iterable)
q = deque(islice(it, size), maxlen=size)
if not q:
return # empty iterable or size == 0
q.extend(fillvalue for _ in range(size - len(q))) # pad to size
while True:
        yield iter(q)  # yield an iterator to avoid accidental outside modification
        # (for our purposes the indices would suffice, so this could be simplified)
try:
q.append(next(it))
except StopIteration: # Python 3.5 pep 479 support
return
q.extend(next(it, fillvalue) for _ in range(step - 1))
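# e.g. [list(w) for w in sliding_window(range(5), size=3, step=1)]
#   -> [[0, 1, 2], [1, 2, 3], [2, 3, 4]]
# note: each yielded window is an iterator, so consume it before advancing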
with faith('10. mapping domain email cols etc') as f:
for data in tqdm([X_train, X_test]):
data['pemail_domain'] = data.P_emaildomain.astype(str).apply(domain)
data['pemail_ext'] = data.P_emaildomain.astype(str).apply(pemail_country).map(country_map)
data['remail_domain'] = data.R_emaildomain.astype(str).apply(domain)
data['remail_ext'] = data.R_emaildomain.astype(str).apply(pemail_country).map(country_map)
data['p_and_r_email'] = data.P_emaildomain.astype(str) + ' ' + data.R_emaildomain.astype(str)
cont_cols = ['TransactionAmt','dist1']
# mem intensive
def calculate_rolling_feats(df, periods=periods, min_instances=min_instances, aggr_cols=aggr_cols, cont_cols=cont_cols):
for period in periods:
for col in tqdm(aggr_cols):
            # Keep only values of this column appearing more than min_instances times:
vcs = df[col].value_counts()
vcs = vcs[vcs>min_instances].index.values
mask = ~df[col].isin(vcs)
            # For these two continuous columns:
# TODO: Experiment w/ other high card cont columns V* here such as: 'V307', 'V314'??
# Chosen for having low nans and high roc
for cont in cont_cols:
# Calculate rolling period mean and mean_diffs:
new_colA = '{}_mean__{}_group_{}'.format(period, cont, col)
new_colB = cont + '_-_' + new_colA
                temp = df.groupby([col, 'ProductCD']).rolling(period, on='TransactionDate')[cont].mean().reset_index()
temp.rename(columns={cont:new_colA}, inplace=True)
temp.drop_duplicates(['TransactionDate', col, 'ProductCD'], inplace=True)
df = df.merge(temp, how='left', on=['TransactionDate', col, 'ProductCD'])
df[new_colB] = df[cont] - df[new_colA]
                # NaN out any newly generated col where the groupby value
                # appears min_instances times or fewer in the dataset:
df.loc[mask, new_colA] = np.nan
df.loc[mask, new_colB] = np.nan
return df
with faith('11. Adding Rolling mean feats') as f:
X_train = calculate_rolling_feats(X_train, periods, min_instances, aggr_cols, cont_cols)
X_test = calculate_rolling_feats(X_test, periods, min_instances, aggr_cols, cont_cols)
# Count of M1=T transactions having the sample in question's addr in the past week
# Count of M1=T transactions having the sample in question's card1-6 in the past week
# Count of M1=T transactions having the sample in question's productcd in the past week
# Count of M1=T transactions having the sample in question's r/e email domain in the past week
CARD_COLS=[f'card{i}' for i in range(1,7)]
with faith('12. Adding m1==1(t) feats aggs etc') as f:
    dfs = []
    for df in tqdm([X_train, X_test]):
        for col in CARD_COLS + ['addr1', 'ProductCD', 'P_emaildomain', 'R_emaildomain']:
            df['M1T'] = df.M1 == 1
            temp = df.groupby(col).rolling('7d', min_periods=7, on='TransactionDate').M1T.sum().reset_index()
            temp.rename(columns={'M1T': f'M1T_7D_{col}'}, inplace=True)
            temp.drop_duplicates([col, 'TransactionDate'], inplace=True)
            del df['M1T']
            # merge() returns a new frame, so rebind df and write it back after the loop
            df = df.merge(temp, how='left', on=['TransactionDate', col])
            del temp
            gc.collect()
        dfs.append(df)
    X_train, X_test = dfs
# DOWNCASTING INT COLS
INT64_COLS = [col for col in X_test.columns if X_test[col].dtype == 'int64']
with faith('13. Downcasting feats....') as f:
for df in [X_train, X_test]:
for col in tqdm(INT64_COLS):
c_min = df[col].min()
c_max = df[col].max()
if c_min > np.iinfo(np.int8).min and c_max < np.iinfo(np.int8).max:
df[col] = df[col].astype(np.int8)
elif c_min > np.iinfo(np.int16).min and c_max < np.iinfo(np.int16).max:
df[col] = df[col].astype(np.int16)
elif c_min > np.iinfo(np.int32).min and c_max < np.iinfo(np.int32).max:
df[col] = df[col].astype(np.int32)
elif c_min > np.iinfo(np.int64).min and c_max < np.iinfo(np.int64).max:
df[col] = df[col].astype(np.int64)
    print('{} columns were downcast.'.format(len(INT64_COLS)))
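# e.g. a column with values in [0, 100] satisfies -128 < 0 and 100 < 127, so it is
# stored as int8; the bounds come from np.iinfo(dtype).min / .max per integer width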
with faith('14. Caching files etc and other new cols') as f:
X_train.to_csv('__preprocessed_train.csv', index=None)
X_test.to_csv('__preprocessed_test.csv', index=None)
xtra_cols_added = list(set(X_train.columns) - set(org_cols))
pd.Series(xtra_cols_added).to_hdf('new_cols_added_in_preprocess_and_fes.hdf', key='preprocess')
pd.Series(org_cols).to_hdf('org_cols_raw_data.hdf', key='raw')
    print('DONE')
# -*- coding: utf8 -*-
# python
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
bl_info = {
'name': 'Texture Paint plus',
'author': 'Bart Crouch, scorpion81, Spirou4D, CDMJ',
'version': (2, 20),
'blender': (2, 73, 0),
'location': 'Paint editor > 3D view',
'warning': '',
'description': 'Several improvements for Texture Paint mode',
'wiki_url': '',
'tracker_url': '',
'category': 'Paint'}
import bgl
import blf
import bpy
import mathutils
import os
import time
import copy
import math
from bpy_extras.io_utils import ImportHelper
from bpy.types import Operator, Menu, Panel, UIList
#################################################
# #
# Functions for #
# '''Sync selection from uv-editor to 3d-view'''#
#################################################
# draw in 3d-view =>OK!
def draw_callback(self, context):
r, g, b = context.tool_settings.image_paint.brush.cursor_color_add
#x0, y0, x1, y1 = context.window_manager["straight_line"]
start = self.stroke[0]
end = self.stroke[-1]
x0 = start["mouse"][0]
y0 = start["mouse"][1]
x1 = end["mouse"][0]
y1 = end["mouse"][1]
# draw straight line
bgl.glEnable(bgl.GL_BLEND)
bgl.glColor4f(r, g, b, 1.0)
bgl.glBegin(bgl.GL_LINE_STRIP)
bgl.glVertex2i(x0, y0)
bgl.glVertex2i(x1, y1)
bgl.glEnd()
# restore opengl defaults
bgl.glDisable(bgl.GL_BLEND)
bgl.glColor4f(0.0, 0.0, 0.0, 1.0)
# return a list of all images that are being displayed in an editor =>OK!
def get_images_in_editors(context):
images = []
for area in context.screen.areas:
if area.type != 'IMAGE_EDITOR':
continue
for space in area.spaces:
if space.type != 'IMAGE_EDITOR':
continue
if space.image:
images.append(space.image)
area.tag_redraw()
return(images)
# calculate for 3d-view =>OK!
def sync_calc_callback(self, context, area, region):
mid_x = region.width/2.0
mid_y = region.height/2.0
width = region.width
height = region.height
region_3d = False
for space in area.spaces:
if space.type == 'VIEW_3D':
region_3d = space.region_3d
if not region_3d:
return
view_mat = region_3d.perspective_matrix
ob_mat = context.active_object.matrix_world
total_mat = view_mat * ob_mat
mesh = context.active_object.data
def transform_loc(loc):
vec = total_mat * loc
vec = mathutils.Vector([vec[0]/vec[3], vec[1]/vec[3], vec[2]/vec[3]])
x = int(mid_x + vec[0]*width/2.0)
y = int(mid_y + vec[1]*height/2.0)
return([x, y])
# vertices
locs = [mesh.vertices[v].co.to_4d() for v in self.overlay_vertices]
self.position_vertices = []
for loc in locs:
self.position_vertices.append(transform_loc(loc))
# edges
locs = [[mesh.vertices[mesh.edges[edge].vertices[0]].co.to_4d(),
mesh.vertices[mesh.edges[edge].vertices[1]].co.to_4d()] \
for edge in self.overlay_edges]
self.position_edges = []
for v1, v2 in locs:
self.position_edges.append(transform_loc(v1))
self.position_edges.append(transform_loc(v2))
# faces
locs = [[mesh.vertices[mesh.faces[face].vertices[0]].co.to_4d(),
mesh.vertices[mesh.faces[face].vertices[1]].co.to_4d(),
mesh.vertices[mesh.faces[face].vertices[2]].co.to_4d(),
mesh.vertices[mesh.faces[face].vertices[3]].co.to_4d(),] \
for face in self.overlay_faces]
self.position_faces = []
for v1, v2, v3, v4 in locs:
self.position_faces.append(transform_loc(v1))
self.position_faces.append(transform_loc(v2))
self.position_faces.append(transform_loc(v3))
self.position_faces.append(transform_loc(v4))
# draw in 3d-view =>OK!
def sync_draw_callback(self, context):
# polling
if context.mode != "EDIT_MESH":
return
# draw vertices
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glPointSize(4)
bgl.glBegin(bgl.GL_POINTS)
for x, y in self.position_vertices:
bgl.glVertex2i(x, y)
bgl.glEnd()
# draw edges
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glLineWidth(1.5)
bgl.glBegin(bgl.GL_LINES)
for x, y in self.position_edges:
bgl.glVertex2i(x, y)
bgl.glEnd()
bgl.glLineWidth(1)
# draw faces
bgl.glEnable(bgl.GL_BLEND)
bgl.glColor4f(1.0, 0.0, 0.0, 0.3)
bgl.glBegin(bgl.GL_QUADS)
for x, y in self.position_faces:
bgl.glVertex2i(x, y)
bgl.glEnd()
bgl.glDisable(bgl.GL_BLEND)
# draw in image-editor =>OK!
def sync_draw_callback2(self, context):
# polling
if context.mode != "EDIT_MESH":
return
# draw vertices
bgl.glColor4f(1.0, 0.0, 0.0, 1.0)
bgl.glPointSize(6)
bgl.glBegin(bgl.GL_POINTS)
for x, y in self.position2_vertices:
bgl.glVertex2f(x, y)
bgl.glEnd()
# draw paint tool and blendmode in 3d-view =>?
def toolmode_draw_callback(self, context):
# polling
if context.mode != 'PAINT_TEXTURE':
return
# draw
if context.region:
main_y = context.region.height - 32
else:
return
blend_dic = {"MIX": "Mix",
"ADD": "Add",
"SUB": "Subtract",
"MUL": "Multiply",
"LIGHTEN": "Lighten",
"DARKEN": "Darken",
"ERASE_ALPHA": "Erase Alpha",
"ADD_ALPHA": "Add Alpha",
"OVERLAY": "Overlay",
"HARDLIGHT": "Hard light",
"COLORBURN": "Color burn",
"LINEARBURN": "Linear burn",
"COLORDODGE": "Color dodge",
"SCREEN": "Screen",
"SOFTLIGHT": "Soft light",
"PINLIGHT": "Pin light",
"VIVIDLIGHT": "Vivid light",
"LINEARLIGHT": "Linear light",
"DIFFERENCE": "Difference",
"EXCLUSION": "Exclusion",
"HUE": "Hue",
"SATURATION": "Saturation",
"LUMINOSITY": "Luminosity",
"COLOR": "Color"
}
brush = context.tool_settings.image_paint.brush
text = brush.name + " - " + blend_dic[brush.blend]
# text in top-left corner
bgl.glColor3f(0.6, 0.6, 0.6)
blf.position(0, 21, main_y, 0)
blf.draw(0, text)
# text above brush
dt = time.time() - context.window_manager["tpp_toolmode_time"]
if dt < 1:
if "tpp_toolmode_brushloc" not in context.window_manager:
return
brush_x, brush_y = context.window_manager["tpp_toolmode_brushloc"]
brush_x -= blf.dimensions(0, text)[0] / 2
bgl.glColor4f(0.6, 0.6, 0.6, min(1.0, (1.0 - dt)*2))
blf.position(0, brush_x, brush_y, 0)
blf.draw(0, text)
# add ID-properties to window-manager
def init_props():
wm = bpy.context.window_manager
wm["tpp_automergeuv"] = 0
# remove ID-properties from window-manager
def remove_props():
wm = bpy.context.window_manager
if "tpp_automergeuv" in wm:
del wm["tpp_automergeuv"]
if "tpp_toolmode_time" in wm:
del wm["tpp_toolmode_time"]
if "tpp_toolmode_brushloc" in wm:
        del wm["tpp_toolmode_brushloc"]
# calculate new snapped location based on start point (sx,sy)
# and current mouse point (mx,my). These coords appear to be
# in 2D screen coords, with the origin at:
# bottom-left, +x right, +y up.
# =>?
def do_snap( sx, sy, mx, my ):
# compute delta between current mouse position and
# start position
dx = mx - sx
dy = my - sy
adx = abs(dx)
ady = abs(dy)
# if delta is "close enough" to the diagonal
if abs( ady - adx ) < 0.5 * max(adx, ady):
# use a simple algorithm to snap based on horizontal
# distance (could use vertical distance, or could use
# radial distance but that would require more calcs).
if (dx > 0 and dy > 0) or (dx < 0 and dy < 0):
x = mx
y = sy + dx
elif (dx > 0 and dy < 0) or (dx < 0 and dy > 0):
x = mx
y = sy - dx
else:
x = mx
y = my
    elif ( adx > ady ):
        # mostly horizontal movement: snap to a horizontal line
        x = mx
        y = sy
    else:
        # mostly vertical movement: snap to a vertical line
        x = sx
        y = my
return (x, y)
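# e.g. do_snap(0, 0, 10, 9) -> (10, 10)  near-diagonal drag snaps to 45 degrees
#      do_snap(0, 0, 10, 2) -> (10, 0)   mostly-horizontal drag snaps horizontal
#      do_snap(0, 0, 2, 10) -> (0, 10)   mostly-vertical drag snaps vertical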
##########################################
# #
# Classes =>? #
# #
##########################################
# =>?
class ImageBuffer:
# based on script by Domino from BlenderArtists
# licensed GPL v2 or later
def __init__(self, image):
self.image = image
self.x, self.y = self.image.size
self.buffer = list(self.image.pixels)
def update(self):
self.image.pixels = self.buffer
def _index(self, x, y):
if x < 0 or y < 0 or x >= self.x or y >= self.y:
return None
return (x + y * self.x) * 4
def set_pixel(self, x, y, colour):
index = self._index(x, y)
if index is not None:
index = int(index)
self.buffer[index:index + 4] = colour
def get_pixel(self, x, y):
index = self._index(x, y)
if index is not None:
index = int(index)
return self.buffer[index:index + 4]
else:
return None
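# Usage sketch (assumes an existing image datablock named "default"):
#   buf = ImageBuffer(bpy.data.images["default"])
#   buf.set_pixel(0, 0, [1.0, 0.0, 0.0, 1.0])  # write one RGBA pixel
#   buf.update()                               # push the buffer back to the image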
# 2d bin packing =>?
class PackTree(object):
# based on python recipe by S W on ActiveState
# PSF license, 16 oct 2005. (GPL compatible)
def __init__(self, area):
if len(area) == 2:
area = (0,0,area[0],area[1])
self.area = area
def get_width(self):
return self.area[2] - self.area[0]
width = property(fget=get_width)
def get_height(self):
return self.area[3] - self.area[1]
height = property(fget=get_height)
def insert(self, area):
if hasattr(self, 'child'):
a = self.child[0].insert(area)
if a is None:
return self.child[1].insert(area)
else:
return a
area = PackTree(area)
if area.width <= self.width and area.height <= self.height:
self.child = [None,None]
self.child[0] = PackTree((self.area[0]+area.width, self.area[1], self.area[2], self.area[1] + area.height))
self.child[1] = PackTree((self.area[0], self.area[1]+area.height, self.area[2], self.area[3]))
return PackTree((self.area[0], self.area[1], self.area[0]+area.width, self.area[1]+area.height))
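# Usage sketch: pack two rectangles into a 256x256 atlas (hypothetical sizes):
#   root = PackTree((256, 256))
#   slot_a = root.insert((128, 64))  # returns a PackTree covering the placed area
#   slot_b = root.insert((64, 64))   # returns None if the rectangle does not fit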
##########################################
# #
# Class Operators #
# #
##########################################
class AddDefaultImage(Operator):
'''Create and assign a new default image to the object'''
bl_idname = "object.add_default_image"
bl_label = "Add default image"
@classmethod
def poll(cls, context):
return(context.active_object and context.active_object.type=='MESH')
def invoke(self, context, event):
ob = context.active_object
mat = bpy.data.materials.new("default")
#Add texture to the mat
tex = bpy.data.textures.new("default", 'IMAGE')
img = bpy.data.images.new("default", 1024, 1024, alpha=True)
ts = mat.texture_slots.add()
tex.image = img
ts.texture = tex
ob.data.materials.append(mat)
return {'FINISHED'}
class AutoMergeUV(Operator):
'''Have UV Merge enabled by default for merge actions'''
bl_idname = "paint.auto_merge_uv"
bl_label = "AutoMerge UV"
def invoke(self, context, event):
wm = context.window_manager
if "tpp_automergeuv" not in wm:
init_props()
wm["tpp_automergeuv"] = 1 - wm["tpp_automergeuv"]
km = bpy.context.window_manager.keyconfigs.default.keymaps['Mesh']
for kmi in km.keymap_items:
if kmi.idname == "mesh.merge":
kmi.properties.uvs = wm["tpp_automergeuv"]
return {'FINISHED'}
class MakeBrushImageTexture(Operator): #class command
bl_label = "New Texture from Image"
bl_idname = "gizmo.image_texture"
filepath = bpy.props.StringProperty(subtype="FILE_PATH")
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def execute(self,context):
tex = bpy.data.textures.new("ImageTexture",'NONE')
tex.use_nodes = True
remove = tex.node_tree.nodes[1]
tex.node_tree.nodes.remove(remove)
tex.node_tree.nodes.new("TextureNodeImage")
tex.node_tree.links.new(tex.node_tree.nodes[0].inputs[0],tex.node_tree.nodes[1].outputs[0])
i = bpy.data.images.load(self.filepath)
tex.node_tree.nodes[1].image = i
bpy.context.tool_settings.image_paint.brush.texture = tex
tex.node_tree.nodes[1].location = [0,50]
tex.node_tree.nodes[0].location = [200,50]
if bpy.context.mode == 'SCULPT':
bpy.context.tool_settings.sculpt.brush.texture = tex
elif bpy.context.mode == 'PAINT_VERTEX':
bpy.context.tool_settings.vertex_paint.brush.texture = tex
#elif bpy.context.mode == 'PAINT_WEIGHT':
# bpy.context.tool_settings.weight_paint.brush.texture = tex
elif bpy.context.mode == 'PAINT_TEXTURE':
bpy.context.tool_settings.image_paint.brush.texture = tex
        return {'FINISHED'}
class MakeBrushImageTextureMask(Operator): #class command
bl_label = "New Mask Texture from Image"
bl_idname = "gizmo.image_texture_mask"
filepath = bpy.props.StringProperty(subtype="FILE_PATH")
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def execute(self,context):
tex = bpy.data.textures.new("ImageTextureMask",'NONE')
tex.use_nodes = True
remove = tex.node_tree.nodes[1]
tex.node_tree.nodes.remove(remove)
tex.node_tree.nodes.new("TextureNodeImage")
tex.node_tree.nodes.new("TextureNodeRGBToBW")
tex.node_tree.links.new(tex.node_tree.nodes[0].inputs[0],tex.node_tree.nodes[2].outputs[0])
tex.node_tree.links.new(tex.node_tree.nodes[2].inputs[0],tex.node_tree.nodes[1].outputs[0])
tex.node_tree.nodes[1].location = [0,50]
tex.node_tree.nodes[2].location = [200,50]
tex.node_tree.nodes[0].location = [400,50]
i = bpy.data.images.load(self.filepath)
tex.node_tree.nodes[1].image = i
#if bpy.context.mode == 'SCULPT':
# bpy.context.tool_settings.sculpt.brush.mask_texture = tex
#elif bpy.context.mode == 'PAINT_VERTEX':
# bpy.context.tool_settings.vertex_paint.brush.mask_texture = tex
#elif bpy.context.mode == 'PAINT_WEIGHT':
# bpy.context.tool_settings.weight_paint.brush.mask_texture = tex
if bpy.context.mode == 'PAINT_TEXTURE':
bpy.context.tool_settings.image_paint.brush.mask_texture = tex
        return {'FINISHED'}
class BrushPopup(Operator):
bl_idname = "view3d.brush_popup"
bl_label = "Brush settings"
bl_options = {'REGISTER', 'UNDO'}
@staticmethod
def paint_settings(context):
toolsettings = context.tool_settings
if context.vertex_paint_object:
return toolsettings.vertex_paint
elif context.weight_paint_object:
return toolsettings.weight_paint
elif context.image_paint_object:
if (toolsettings.image_paint and toolsettings.image_paint.detect_data()):
return toolsettings.image_paint
return None
return None
@staticmethod
def unified_paint_settings(parent, context):
ups = context.tool_settings.unified_paint_settings
parent.label(text="Unified Settings:")
row = parent.row()
row.prop(ups, "use_unified_size", text="Size")
row.prop(ups, "use_unified_strength", text="Strength")
if context.weight_paint_object:
parent.prop(ups, "use_unified_weight", text="Weight")
elif context.vertex_paint_object or context.image_paint_object:
parent.prop(ups, "use_unified_color", text="Color")
else:
parent.prop(ups, "use_unified_color", text="Color")
@staticmethod
def prop_unified_size(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_size else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_strength(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_strength else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_weight(parent, context, brush, prop_name, icon='NONE', text="", slider=False):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_weight else brush
parent.prop(ptr, prop_name, icon=icon, text=text, slider=slider)
@staticmethod
def prop_unified_color(parent, context, brush, prop_name, text=""):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_color else brush
parent.prop(ptr, prop_name, text=text)
@staticmethod
def prop_unified_color_picker(parent, context, brush, prop_name, value_slider=True):
ups = context.tool_settings.unified_paint_settings
ptr = ups if ups.use_unified_color else brush
parent.template_color_picker(ptr, prop_name, value_slider=value_slider)
def check(self, context):
'''
settings = self.paint_settings(context)
brush_updated = settings.brush.is_updated
if brush_updated:
return True
'''
return True
@classmethod
def poll(self, context):
obj = context.active_object
if obj is not None:
A = context.active_object.type == 'MESH'
B = context.mode in {'PAINT_TEXTURE','PAINT_VERTEX','PAINT_WEIGHT'}
return A and B
def brush_texpaint_common(self, layout, context, brush, settings, projpaint=False):
capabilities = brush.image_paint_capabilities
col = layout.column()
if brush.image_tool in {'DRAW', 'FILL'}:
if brush.blend not in {'ERASE_ALPHA', 'ADD_ALPHA'}:
if not brush.use_gradient:
self.prop_unified_color_picker(col, context, brush, "color", value_slider=True)
if settings.palette:
col.template_palette(settings, "palette", color=True)
if brush.use_gradient:
col.label("Gradient Colors")
col.template_color_ramp(brush, "gradient", expand=True)
if brush.image_tool != 'FILL':
col.label("Background Color")
row = col.row(align=True)
self.prop_unified_color(row, context, brush, "secondary_color", text="")
if brush.image_tool == 'DRAW':
col.prop(brush, "gradient_stroke_mode", text="Mode")
if brush.gradient_stroke_mode in {'SPACING_REPEAT', 'SPACING_CLAMP'}:
col.prop(brush, "grad_spacing")
elif brush.image_tool == 'FILL':
col.prop(brush, "gradient_fill_mode")
else:
row = col.row(align=True)
self.prop_unified_color(row, context, brush, "color", text="")
if brush.image_tool == 'FILL' and not projpaint:
col.prop(brush, "fill_threshold")
else:
self.prop_unified_color(row, context, brush, "secondary_color", text="")
row.separator()
row.operator("paint.brush_colors_flip", icon='FILE_REFRESH', text="")
elif brush.image_tool == 'SOFTEN':
col = layout.column(align=True)
col.row().prop(brush, "direction", expand=True)
col.separator()
col.prop(brush, "sharp_threshold")
if not projpaint:
col.prop(brush, "blur_kernel_radius")
col.separator()
col.prop(brush, "blur_mode")
elif brush.image_tool == 'MASK':
col.prop(brush, "weight", text="Mask Value", slider=True)
elif brush.image_tool == 'CLONE':
col.separator()
if projpaint:
if settings.mode == 'MATERIAL':
col.prop(settings, "use_clone_layer", text="Clone from paint slot")
elif settings.mode == 'IMAGE':
col.prop(settings, "use_clone_layer", text="Clone from image/UV map")
if settings.use_clone_layer:
ob = context.active_object
col = layout.column()
if settings.mode == 'MATERIAL':
if len(ob.material_slots) > 1:
col.label("Materials")
col.template_list("MATERIAL_UL_matslots", "",
ob, "material_slots",
ob, "active_material_index", rows=2)
mat = ob.active_material
if mat:
col.label("Source Clone Slot")
col.template_list("TEXTURE_UL_texpaintslots", "",
mat, "texture_paint_images",
mat, "paint_clone_slot", rows=2)
elif settings.mode == 'IMAGE':
mesh = ob.data
clone_text = mesh.uv_texture_clone.name if mesh.uv_texture_clone else ""
col.label("Source Clone Image")
col.template_ID(settings, "clone_image")
col.label("Source Clone UV Map")
col.menu("VIEW3D_MT_tools_projectpaint_clone", text=clone_text, translate=False)
else:
col.prop(brush, "clone_image", text="Image")
col.prop(brush, "clone_alpha", text="Alpha")
col.separator()
if capabilities.has_radius:
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
if capabilities.has_space_attenuation:
row.prop(brush, "use_space_attenuation", toggle=True, icon_only=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
if brush.image_tool in {'DRAW', 'FILL'}:
col.separator()
col.prop(brush, "blend", text="Blend")
col = layout.column()
# use_accumulate
if capabilities.has_accumulate:
col = layout.column(align=True)
col.prop(brush, "use_accumulate")
if projpaint:
col.prop(brush, "use_alpha")
col.prop(brush, "use_gradient")
col.separator()
col.template_ID(settings, "palette", new="palette.new")
def draw(self, context):
# Init values
toolsettings = context.tool_settings
settings = self.paint_settings(context)
brush = settings.brush
ipaint = toolsettings.image_paint
layout = self.layout
# Stroke mode
col = layout.column()
col.prop(brush, "stroke_method", text="")
if brush.use_anchor:
col.separator()
col.prop(brush, "use_edge_to_edge", "Edge To Edge")
if brush.use_airbrush:
col.separator()
col.prop(brush, "rate", text="Rate", slider=True)
if brush.use_space:
col.separator()
row = col.row(align=True)
row.prop(brush, "spacing", text="Spacing")
row.prop(brush, "use_pressure_spacing", toggle=True, text="")
if brush.use_line or brush.use_curve:
col.separator()
row = col.row(align=True)
row.prop(brush, "spacing", text="Spacing")
if brush.use_curve:
col.separator()
col.template_ID(brush, "paint_curve", new="paintcurve.new")
col.operator("paintcurve.draw")
else:
col.separator()
row = col.row(align=True)
row.prop(brush, "use_relative_jitter", icon_only=True)
if brush.use_relative_jitter:
row.prop(brush, "jitter", slider=True)
else:
row.prop(brush, "jitter_absolute")
row.prop(brush, "use_pressure_jitter", toggle=True, text="")
col = layout.column()
col.separator()
if brush.brush_capabilities.has_smooth_stroke:
col.prop(brush, "use_smooth_stroke")
sub = col.column()
sub.active = brush.use_smooth_stroke
sub.prop(brush, "smooth_stroke_radius", text="Radius", slider=True)
sub.prop(brush, "smooth_stroke_factor", text="Factor", slider=True)
layout.prop(settings, "input_samples")
# Curve stroke
col = layout.column(align=True)
settings = self.paint_settings(context)
brush = settings.brush
layout.template_curve_mapping(brush, "curve", brush=True)
col = layout.column(align=True)
row = col.row(align=True)
row.operator("brush.curve_preset", icon='SMOOTHCURVE', text="").shape = 'SMOOTH'
row.operator("brush.curve_preset", icon='SPHERECURVE', text="").shape = 'ROUND'
row.operator("brush.curve_preset", icon='ROOTCURVE', text="").shape = 'ROOT'
row.operator("brush.curve_preset", icon='SHARPCURVE', text="").shape = 'SHARP'
row.operator("brush.curve_preset", icon='LINCURVE', text="").shape = 'LINE'
row.operator("brush.curve_preset", icon='NOCURVE', text="").shape = 'MAX'
# Symetries mode
col = layout.column(align=True)
row = col.row(align=True)
row.prop(ipaint, "use_symmetry_x", text="X", toggle=True)
row.prop(ipaint, "use_symmetry_y", text="Y", toggle=True)
row.prop(ipaint, "use_symmetry_z", text="Z", toggle=True)
# imagepaint tool operate buttons
col = layout.split().column()
col.template_ID_preview(settings, "brush", new="brush.add", rows=3, cols=8)
########################################################################
# Texture Paint Mode #
if context.image_paint_object and brush:
self.brush_texpaint_common( layout, context, brush, settings, True)
########################################################################
# Weight Paint Mode #
elif context.weight_paint_object and brush:
col = layout.column()
row = col.row(align=True)
self.prop_unified_weight(row, context, brush, "weight", slider=True, text="Weight")
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
col.prop(brush, "vertex_tool", text="Blend")
if brush.vertex_tool == 'BLUR':
col.prop(brush, "use_accumulate")
col.separator()
col = layout.column()
col.prop(toolsettings, "use_auto_normalize", text="Auto Normalize")
col.prop(toolsettings, "use_multipaint", text="Multi-Paint")
########################################################################
# Vertex Paint Mode #
elif context.vertex_paint_object and brush:
col = layout.column()
self.prop_unified_color_picker(col, context, brush, "color", value_slider=True)
if settings.palette:
col.template_palette(settings, "palette", color=True)
self.prop_unified_color(col, context, brush, "color", text="")
col.separator()
row = col.row(align=True)
self.prop_unified_size(row, context, brush, "size", slider=True, text="Radius")
self.prop_unified_size(row, context, brush, "use_pressure_size")
row = col.row(align=True)
self.prop_unified_strength(row, context, brush, "strength", text="Strength")
self.prop_unified_strength(row, context, brush, "use_pressure_strength")
col.separator()
col.prop(brush, "vertex_tool", text="Blend")
col.separator()
col.template_ID(settings, "palette", new="palette.new")
def invoke(self, context, event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
wm = context.window_manager
return wm.invoke_props_dialog(self, width=160)
def execute(self, context):
return {'FINISHED'}
class TexturePopup(Operator):
bl_idname = "view3d.texture_popup"
bl_label = "Textures and Mask Textures settings"
bl_options = {'REGISTER', 'UNDO'}
    toggleMenu = bpy.props.BoolProperty(default=True)  # toggle Texture or Mask menu
def check(self, context):
return True
@classmethod
def poll(self, context):
obj = context.active_object
if obj is not None:
A = obj.type == 'MESH'
B = context.mode == 'PAINT_TEXTURE'
return A and B
def draw(self, context):
# Init values
toolsettings = context.tool_settings
brush = toolsettings.image_paint.brush
tex_slot = brush.texture_slot
mask_tex_slot = brush.mask_texture_slot
unified = toolsettings.unified_paint_settings
settings = toolsettings.image_paint
# textures panel
layout = self.layout
# Parameter Toggle Menu
_TITLE = 'TEXTURES' if self.toggleMenu else 'MASKS'
_ICON = 'TEXTURE' if self.toggleMenu else 'MOD_MASK'
Menu = layout.row()
Menu.prop(self, "toggleMenu", text=_TITLE, icon=_ICON)
if self.toggleMenu:
col = layout.column() #TEXTURES
col.template_ID_preview(brush, "texture", new="texture.new", \
rows=3, cols=8)
layout.label(text="Brush Mapping:")
# texture_map_mode
layout.row().prop(tex_slot, "tex_paint_map_mode", text="")
layout.separator()
if tex_slot.map_mode == 'STENCIL':
if brush.texture and brush.texture.type == 'IMAGE':
layout.operator("brush.stencil_fit_image_aspect")
layout.operator("brush.stencil_reset_transform")
# angle and texture_angle_source
if tex_slot.has_texture_angle:
col = layout.column()
col.label(text="Angle:")
col.prop(tex_slot, "angle", text="")
if tex_slot.has_texture_angle_source:
col.prop(tex_slot, "use_rake", text="Rake")
if brush.brush_capabilities.has_random_texture_angle and tex_slot.has_random_texture_angle:
col.prop(tex_slot, "use_random", text="Random")
if tex_slot.use_random:
col.prop(tex_slot, "random_angle", text="")
# scale and offset
split = layout.split()
split.prop(tex_slot, "offset")
split.prop(tex_slot, "scale")
row = layout.row()
row.operator(MakeBrushImageTexture.bl_idname)
else:
col = layout.column() #MASK TEXTURE
col.template_ID_preview(brush, "mask_texture", new="texture.new", \
rows=3, cols=8)
layout.label(text="Mask Mapping:")
# map_mode
layout.row().prop(mask_tex_slot, "mask_map_mode", text="")
layout.separator()
if mask_tex_slot.map_mode == 'STENCIL':
if brush.mask_texture and brush.mask_texture.type == 'IMAGE':
layout.operator("brush.stencil_fit_image_aspect").mask = True
layout.operator("brush.stencil_reset_transform").mask = True
col = layout.column()
col.prop(brush, "use_pressure_masking", text="")
# angle and texture_angle_source
if mask_tex_slot.has_texture_angle:
col = layout.column()
col.label(text="Angle:")
col.prop(mask_tex_slot, "angle", text="")
if mask_tex_slot.has_texture_angle_source:
col.prop(mask_tex_slot, "use_rake", text="Rake")
if brush.brush_capabilities.has_random_texture_angle and mask_tex_slot.has_random_texture_angle:
col.prop(mask_tex_slot, "use_random", text="Random")
if mask_tex_slot.use_random:
col.prop(mask_tex_slot, "random_angle", text="")
# scale and offset
split = layout.split()
split.prop(mask_tex_slot, "offset")
split.prop(mask_tex_slot, "scale")
row = layout.row()
row.operator(MakeBrushImageTextureMask.bl_idname)
def invoke(self, context, event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
return context.window_manager.\
invoke_props_dialog(self, width=160)
def execute(self, context):
return {'FINISHED'}
class SelectVertgroup(bpy.types.Operator):
"""Select Vertgroup"""
bl_idname = "object.select_vgroup"
bl_label = "Select VGroup"
bl_options = { 'REGISTER', 'UNDO' }
def execute(self, context):
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.object.vertex_group_select()#select current active vgroup
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.paint.texture_paint_toggle()#Texpaint
bpy.context.object.data.use_paint_mask = True #set face select masking on in case we forgot
return {'FINISHED'}
class DeselectVertgroup(bpy.types.Operator):
"""Deselect Vertgroup"""
bl_idname = "object.deselect_vgroup"
bl_label = "Deselect VGroup"
bl_options = { 'REGISTER', 'UNDO' }
def execute(self, context):
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.object.vertex_group_deselect()#select current active vgroup
bpy.ops.object.editmode_toggle()#toggle editmode
bpy.ops.paint.texture_paint_toggle()#Texpaint
bpy.context.object.data.use_paint_mask = True #set face select masking on in case we forgot
return {'FINISHED'}
class Slots_projectpaint(Operator):
bl_idname = "slots.projectpaint"
bl_label = "Slots & VGroups"
bl_options = {'REGISTER', 'UNDO'}
def check(self, context):
return True
@classmethod
def poll(cls, context):
brush = context.tool_settings.image_paint.brush
ob = context.active_object
if (brush is not None and ob is not None):
A = context.active_object.type == 'MESH'
B = context.mode == 'PAINT_TEXTURE'
return A and B
def draw(self, context):
settings = context.tool_settings.image_paint
ob = context.active_object
layout = self.layout
col = layout.column()
col.separator()
col.operator("image.save_dirty", text="Save All Images")
layout = self.layout
ob = context.object
group = ob.vertex_groups.active
rows = 2
if group:
rows = 4
row = layout.row()
row.template_list("MESH_UL_vgroups", "", ob, "vertex_groups", ob.vertex_groups, "active_index", rows=rows)
col = row.column(align=True)
col.operator("object.vertex_group_add", icon='ZOOMIN', text="")
col.operator("object.vertex_group_remove", icon='ZOOMOUT', text="").all = False
col.menu("MESH_MT_vertex_group_specials", icon='DOWNARROW_HLT', text="")
if group:
col.separator()
col.operator("object.vertex_group_move", icon='TRIA_UP', text="").direction = 'UP'
col.operator("object.vertex_group_move", icon='TRIA_DOWN', text="").direction = 'DOWN'
if ob.vertex_groups and (ob.mode == 'EDIT' or (ob.mode == 'WEIGHT_PAINT' and ob.type == 'MESH' and ob.data.use_paint_mask_vertex)):
row = layout.row()
sub = row.row(align=True)
sub.operator("object.vertex_group_assign", text="Assign")
sub.operator("object.vertex_group_remove_from", text="Remove")
sub = row.row(align=True)
sub.operator("object.vertex_group_select", text="Select")
sub.operator("object.vertex_group_deselect", text="Deselect")
layout.prop(context.tool_settings, "vertex_group_weight", text="Weight")
#row = layout.row()
row = layout.row(align=True)
row.operator("object.select_vgroup", text = "Select VGroup", icon = 'ROTACTIVE')
#row = layout.column()
row.operator("object.deselect_vgroup", text = "Deselect VGroup", icon = 'ROTACTIVE')
layout = self.layout
col = layout.column()
col.label("Painting Mode")
col.prop(settings, "mode", text="")
col.separator()
if settings.mode == 'MATERIAL':
if len(ob.material_slots) > 1:
col.label("Materials")
col.template_list("MATERIAL_UL_matslots", "layers",
ob, "material_slots",
ob, "active_material_index", rows=2)
mat = ob.active_material
if mat:
col.label("Available Paint Slots")
col.template_list("TEXTURE_UL_texpaintslots", "",
mat, "texture_paint_images",
mat, "paint_active_slot", rows=2)
if mat.texture_paint_slots:
slot = mat.texture_paint_slots[mat.paint_active_slot]
else:
slot = None
if (not mat.use_nodes) and context.scene.render.engine in {'BLENDER_RENDER', 'BLENDER_GAME'}:
row = col.row(align=True)
row.operator_menu_enum("paint.add_texture_paint_slot", "type")
row.operator("paint.delete_texture_paint_slot", text="", icon='X')
if slot:
col.prop(mat.texture_slots[slot.index], "blend_type")
col.separator()
if slot and slot.index != -1:
col.label("UV Map")
col.prop_search(slot, "uv_layer", ob.data, "uv_textures", text="")
elif settings.mode == 'IMAGE':
mesh = ob.data
uv_text = mesh.uv_textures.active.name if mesh.uv_textures.active else ""
col.label("Canvas Image")
col.template_ID(settings, "canvas")
col.operator("image.new", text="New").gen_context = 'PAINT_CANVAS'
col.label("UV Map")
col.menu("VIEW3D_MT_tools_projectpaint_uvlayer", text=uv_text, translate=False)
def invoke(self, context,event):
if context.space_data.type == 'IMAGE_EDITOR':
context.space_data.mode = 'PAINT'
return context.window_manager.invoke_props_dialog(self, width=240)
def execute(self, context):
return {'FINISHED'}
class ChangeSelection(Operator):
'''Select more or less vertices/edges/faces, connected to the original selection'''
bl_idname = "paint.change_selection"
bl_label = "Change selection"
mode = bpy.props.EnumProperty(name="Mode",
items = (("more", "More", "Select more vertices/edges/faces"),
("less", "Less", "Select less vertices/edges/faces")),
description = "Choose whether the selection should be increased or decreased",
default = 'more')
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
bpy.ops.object.mode_set(mode='EDIT')
if self.mode == 'more':
bpy.ops.mesh.select_more()
else: #self.mode == 'less'
bpy.ops.mesh.select_less()
bpy.ops.object.mode_set(mode='TEXTURE_PAINT')
return {'FINISHED'}
class DefaultMaterial(Operator):
'''Add a default dif/spec/normal material to an object'''
bl_idname = "object.default_material"
bl_label = "Default material"
@classmethod
def poll(cls, context):
object = context.active_object
if not object or not object.data:
return False
return object.type == 'MESH'
def invoke(self, context, event):
objects = context.selected_objects
for ob in objects:
if not ob.data or ob.type != 'MESH':
continue
mat = bpy.data.materials.new(ob.name)
# diffuse texture
tex = bpy.data.textures.new(ob.name+"_DIFF", 'IMAGE')
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
ts.texture = tex
# specular texture
tex = bpy.data.textures.new(ob.name+"_SPEC", 'IMAGE')
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
ts.use_map_color_diffuse = False
ts.use_map_specular = True
ts.texture = tex
# normal texture
tex = bpy.data.textures.new(ob.name+"_NORM", 'IMAGE')
tex.use_normal_map = True
ts = mat.texture_slots.add()
ts.texture_coords = 'UV'
ts.use_map_color_diffuse = False
ts.use_map_normal = True
ts.texture = tex
ob.data.materials.append(mat)
return {'FINISHED'}
class GridTexture(Operator):
'''Toggle between current texture and UV / Colour grids'''
bl_idname = "paint.grid_texture"
bl_label = "Grid texture"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
        engine = bpy.context.scene.render.engine
        if engine == 'BLENDER_RENDER':
objects = bpy.context.selected_objects
            meshes = [ob.data for ob in objects if ob.type == 'MESH']
if not meshes:
self.report({'INFO'}, "Couldn't locate meshes to operate on")
return {'CANCELLED'}
tex_image = []
for mesh in meshes:
for mat in mesh.materials:
for tex in [ts.texture for ts in mat.texture_slots if ts and ts.texture.type=='IMAGE' and ts.texture.image]:
tex_image.append([tex.name, tex.image.name])
if not tex_image:
self.report({'INFO'}, "Couldn't locate textures to operate on")
return {'CANCELLED'}
first_image = bpy.data.images[tex_image[0][1]]
if "grid_texture_mode" in first_image:
mode = first_image["grid_texture_mode"]
else:
mode = 1
if mode == 1:
# original textures, change to new UV grid
width = max([bpy.data.images[image].size[0] for tex, image in tex_image])
height = max([bpy.data.images[image].size[1] for tex, image in tex_image])
new_image = bpy.data.images.new("temp_grid", width=width, height=height)
new_image.generated_type = 'UV_GRID'
new_image["grid_texture"] = tex_image
new_image["grid_texture_mode"] = 2
for tex, image in tex_image:
bpy.data.textures[tex].image = new_image
elif mode == 2:
# change from UV grid to Colour grid
first_image.generated_type = 'COLOR_GRID'
first_image["grid_texture_mode"] = 3
elif mode == 3:
# change from Colour grid back to original textures
if "grid_texture" not in first_image:
first_image["grid_texture_mode"] = 1
self.report({'ERROR'}, "Couldn't retrieve original images")
return {'FINISHED'}
tex_image = first_image["grid_texture"]
for tex, image in tex_image:
if tex in bpy.data.textures and image in bpy.data.images:
bpy.data.textures[tex].image = bpy.data.images[image]
bpy.data.images.remove(first_image)
return {'FINISHED'}
        elif engine == 'CYCLES':
return {'FINISHED'}
else:
return {'FINISHED'}
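# Note on GridTexture above: the toggle state is tracked per-image in two custom
# ID properties. The cycle implemented in invoke() is:
#   mode 1 -> original textures; pressing G creates a shared "temp_grid" image
#             (generated_type='UV_GRID') and assigns it to every image texture
#   mode 2 -> UV grid; pressing G flips generated_type to 'COLOR_GRID'
#   mode 3 -> colour grid; pressing G restores the images saved in the
#             "grid_texture" property and removes the temporary grid image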
class MassLinkAppend(Operator, ImportHelper):
'''Import objects from multiple blend-files at the same time'''
bl_idname = "wm.mass_link_append"
bl_label = "Mass Link/Append"
bl_options = {'REGISTER', 'UNDO'}
active_layer = bpy.props.BoolProperty(name="Active Layer",
default=True,
description="Put the linked objects on the active layer")
autoselect = bpy.props.BoolProperty(name="Select",
default=True,
description="Select the linked objects")
instance_groups = bpy.props.BoolProperty(name="Instance Groups",
default=False,
description="Create instances for each group as a DupliGroup")
link = bpy.props.BoolProperty(name="Link",
default=False,
description="Link the objects or datablocks rather than appending")
relative_path = bpy.props.BoolProperty(name="Relative Path",
default=True,
description="Select the file relative to the blend file")
def execute(self, context):
directory, filename = os.path.split(bpy.path.abspath(self.filepath))
files = []
# find all blend-files in the given directory
for root, dirs, filenames in os.walk(directory):
for file in filenames:
if file.endswith(".blend"):
files.append([root+os.sep, file])
break # don't search in subdirectories
# append / link objects
old_selection = context.selected_objects
new_selection = []
print("_______ Texture Paint Plus _______")
print("You can safely ignore the line(s) below")
for directory, filename in files:
# get object names
with bpy.data.libraries.load(directory + filename) as (append_lib, current_lib):
ob_names = append_lib.objects
            append_libs = [{"name": name} for name in ob_names]
            # appending / linking (a single call imports every object via "files")
            bpy.ops.wm.link_append(filepath=os.sep+filename+os.sep+"Object"+os.sep,
                directory=directory+filename+os.sep+"Object"+os.sep,
                link=self.link, autoselect=True, active_layer=self.active_layer,
                relative_path=self.relative_path, instance_groups=self.instance_groups,
                files=append_libs)
if not self.link:
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.make_local()
bpy.ops.object.make_local(type='SELECTED_OBJECTS_DATA')
new_selection += context.selected_objects
print("__________________________________")
bpy.ops.object.select_all(action='DESELECT')
if self.autoselect:
for ob in new_selection:
ob.select = True
else:
for ob in old_selection:
ob.select = True
return {'FINISHED'}
class ReloadImage(Operator): #unused class?
'''Reload image displayed in image-editor'''
bl_idname = "paint.reload_image"
bl_label = "Reload image"
def invoke(self, context, event):
images = get_images_in_editors(context)
for img in images:
img.reload()
# make the changes immediately visible in 3d-views
# image editor updating is handled in get_images_in_editors()
for area in context.screen.areas:
if area.type == 'VIEW_3D':
area.tag_redraw()
return{'FINISHED'}
class ReloadImages(Operator):
'''Reload all images'''
bl_idname = "paint.reload_images"
bl_label = "Reload all images"
def invoke(self, context, event):
reloaded = [0, 0]
for img in bpy.data.images:
img.reload()
# make the changes immediately visible in image editors and 3d-views
for area in context.screen.areas:
if area.type == 'IMAGE_EDITOR' or area.type == 'VIEW_3D':
area.tag_redraw()
return {'FINISHED'}
class SampleColor(Operator):
'''Sample color'''
bl_idname = "paint.sample_color_custom"
bl_label = "Sample color"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
mesh = context.active_object.data
paint_mask = mesh.use_paint_mask
mesh.use_paint_mask = False
bpy.ops.paint.sample_color('INVOKE_REGION_WIN')
mesh.use_paint_mask = paint_mask
return {'FINISHED'}
class SaveImage(Operator):
    '''Save image displayed in image-editor'''
bl_idname = "paint.save_image"
bl_label = "Save image"
def invoke(self, context, event):
images = get_images_in_editors(context)
for img in images:
img.save()
return{'FINISHED'}
class SaveImages(Operator):
'''Save all images'''
bl_idname = "wm.save_images"
bl_label = "Save all images"
def invoke(self, context, event):
correct = 0
for img in bpy.data.images:
try:
img.save()
correct += 1
except:
# some images don't have a source path (e.g. render result)
pass
self.report({'INFO'}, "Saved " + str(correct) + " images")
return {'FINISHED'}
class SyncSelection(Operator):
'''Sync selection from uv-editor to 3d-view'''
bl_idname = "uv.sync_selection"
bl_label = "Sync selection"
_timer = None
_selection_3d = []
handle1 = None
handle2 = None
handle3 = None
area = None
region = None
overlay_vertices = []
overlay_edges = []
overlay_faces = []
position_vertices = []
position_edges = []
position_faces = []
    position2_vertices = []
    position2_edges = []
    position2_faces = []
@classmethod
def poll(cls, context):
return(context.active_object and context.active_object.mode=='EDIT')
def modal(self, context, event):
if self.area:
self.area.tag_redraw()
if context.area:
context.area.tag_redraw()
if context.window_manager.tpp.sync_enabled == -1:
self.region.callback_remove(self.handle1)
self.region.callback_remove(self.handle2)
context.region.callback_remove(self.handle3)
self.area = None
self.region = None
context.window_manager.tpp.sync_enabled = 0
return {"CANCELLED"}
return {'PASS_THROUGH'}
def invoke(self, context, event):
if context.window_manager.tpp.sync_enabled < 1:
for area in context.screen.areas:
if area.type == 'VIEW_3D':
self.area = area
for region in area.regions:
if region.type == 'WINDOW':
self.region = region
context.window_manager.tpp.sync_enabled = 1
# getting overlay selection
old_sync = context.tool_settings.use_uv_select_sync
old_select_mode = [x for x in context.tool_settings.mesh_select_mode]
context.tool_settings.mesh_select_mode = [True, False, False]
bpy.ops.object.mode_set(mode='OBJECT')
mesh = context.active_object.data
self._selection_3d = [v.index for v in mesh.vertices if v.select]
tfl = mesh.uv_textures.active
selected = []
for mface, tface in zip(mesh.faces, tfl.data):
selected += [mface.vertices[i] for i, x in enumerate(tface.select_uv) if x]
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='OBJECT')
context.tool_settings.use_uv_select_sync = True
for v in selected:
mesh.vertices[v].select = True
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.object.mode_set(mode='OBJECT')
# indices for overlay in 3d-view
self.overlay_vertices = [vertex.index for vertex in mesh.vertices if vertex.select]
self.overlay_edges = [edge.index for edge in mesh.edges if edge.select]
self.overlay_faces = [face.index for face in mesh.faces if face.select]
# overlay positions for image editor
dict_vertex_pos = dict([[i, []] for i in range(len(mesh.vertices))])
tfl = mesh.uv_textures.active
for mface, tface in zip(mesh.faces, tfl.data):
for i, vert in enumerate(mface.vertices):
dict_vertex_pos[vert].append([co for co in tface.uv[i]])
self.position2_vertices = []
for v in self.overlay_vertices:
for pos in dict_vertex_pos[v]:
self.position2_vertices.append(pos)
# set everything back to original state
bpy.ops.object.mode_set(mode='EDIT')
context.tool_settings.use_uv_select_sync = old_sync
bpy.ops.mesh.select_all(action='DESELECT')
bpy.ops.object.mode_set(mode='OBJECT')
for v in self._selection_3d:
mesh.vertices[v].select = True
bpy.ops.object.mode_set(mode='EDIT')
context.tool_settings.mesh_select_mode = old_select_mode
# 3d view callbacks
context.window_manager.modal_handler_add(self)
self.handle1 = region.callback_add(sync_calc_callback,
(self, context, area, region), "POST_VIEW")
self.handle2 = region.callback_add(sync_draw_callback,
(self, context), "POST_PIXEL")
# image editor callback
self.handle3 = context.region.callback_add(sync_draw_callback2,
(self, context), "POST_VIEW")
break
break
else:
context.window_manager.tpp.sync_enabled = -1
return {'RUNNING_MODAL'}
class ToggleAddMultiply(Operator):
'''Toggle between Add and Multiply blend modes'''
bl_idname = "paint.toggle_add_multiply"
bl_label = "Toggle add/multiply"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
brush = context.tool_settings.image_paint.brush
if brush.blend != 'ADD':
brush.blend = 'ADD'
else:
brush.blend = 'MUL'
return {'FINISHED'}
class ToggleColorSoftLightScreen(Operator):
'''Toggle between Color and Softlight and Screen blend modes'''
bl_idname = "paint.toggle_color_soft_light_screen"
bl_label = "Toggle color-softlight-screen"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
brush = context.tool_settings.image_paint.brush
if brush.blend != 'COLOR' and brush.blend != 'SOFTLIGHT':
brush.blend = 'COLOR'
elif brush.blend == 'COLOR':
brush.blend = 'SOFTLIGHT'
elif brush.blend == 'SOFTLIGHT':
brush.blend = 'SCREEN'
return {'FINISHED'}
class ToggleAlphaMode(Operator):
'''Toggle between Add Alpha and Erase Alpha blend modes'''
bl_idname = "paint.toggle_alpha_mode"
bl_label = "Toggle alpha mode"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
brush = context.tool_settings.image_paint.brush
if brush.blend != 'ERASE_ALPHA':
brush.blend = 'ERASE_ALPHA'
else:
brush.blend = 'ADD_ALPHA'
return {'FINISHED'}
class ToggleImagePaint(bpy.types.Operator):
'''Toggle image painting in the UV/Image editor'''
bl_idname = "paint.toggle_image_paint"
bl_label = "Image Painting"
@classmethod
def poll(cls, context):
return(context.space_data.type == 'IMAGE_EDITOR')
def invoke(self, context, event):
if (context.space_data.mode == 'VIEW'):
context.space_data.mode = 'PAINT'
elif (context.space_data.mode == 'PAINT'):
context.space_data.mode = 'MASK'
elif (context.space_data.mode == 'MASK'):
context.space_data.mode = 'VIEW'
return {'FINISHED'}
class InitPaintBlend(bpy.types.Operator):
'''Toggle between Add Alpha and Erase Alpha blend modes'''
bl_idname = "paint.init_blend_mode"
bl_label = "Init paint blend mode"
@classmethod
def poll(cls, context):
return bpy.ops.paint.image_paint.poll()
def invoke(self, context, event):
brush = context.tool_settings.image_paint.brush
brush.blend = 'MIX'
return {'FINISHED'}
class ToggleUVSelectSync(Operator):
'''Toggle use_uv_select_sync in the UV editor'''
bl_idname = "uv.toggle_uv_select_sync"
bl_label = "UV Select Sync"
@classmethod
def poll(cls, context):
return(context.space_data.type == 'IMAGE_EDITOR')
def invoke(self, context, event):
context.tool_settings.use_uv_select_sync = not context.tool_settings.use_uv_select_sync
return {'FINISHED'}
##########################################
# #
# User Properties #
# #
##########################################
# property group containing all properties of the add-on
class TexturePaintPlusProps(bpy.types.PropertyGroup):
sync_enabled = bpy.props.IntProperty(name = "Enabled",
description = "internal use",
default = 0)
toolmode_enabled = bpy.props.IntProperty(name = "Enabled",
description = "internal use",
default = 0)
toolmode_mode = bpy.props.StringProperty(name = "Mode",
description = "internal use",
default = "")
toolmode_tool = bpy.props.StringProperty(name = "Tool",
description = "internal use",
default = "")
line_last = bpy.props.BoolProperty(name = "Last_f",
description = "Last position valid",
default = False)
line_x = bpy.props.IntProperty(name = "Last_x",
description = "Last position X",
default = 0)
line_y = bpy.props.IntProperty(name = "Last_y",
description = "Last position y",
default = 0)
#legend:
#(--) = No shortcut!
classes = [AddDefaultImage, #add default paint image (Shift Alt X) 3DVIEW
           AutoMergeUV, #add "Automerge UV" entry in the UI > Mesh menu [EDIT MODE] (--)
MakeBrushImageTexture, #add a paint texture (--)
MakeBrushImageTextureMask, #add a mask paint texture (--)
BrushPopup, #brush panel (W) PAINT
TexturePopup, #textures et mask panel (Alt W) PAINT
Slots_projectpaint, #images slots panel (Shift W) PAINT
ChangeSelection, #multi-selections in UI Blender (--)
DefaultMaterial, #add a default material (Ctrl Alt X) 3DVIEW
           GridTexture, #show a UV grid texture (G) PAINT
MassLinkAppend, #add several images folder (Ctrl F1) WINDOW
ReloadImage, #reload active paint image [unused?] ========================
ReloadImages, #reload all paint images (Ctrl Alt R) WINDOW
           SampleColor, #color sample tool (OS right-click) PAINT
SaveImage, #save paint image (ALt S) PAINT
SaveImages, #save all paint images (Ctrl Alt S) WINDOW
SyncSelection, #[unused?] 3DVIEW-EDIT =======================
ToggleUVSelectSync, #[unused?] IMAGE_EDITOR =======================
ToggleAddMultiply, #Toggle Add/Multiply paint mode (D) PAINT
           ToggleColorSoftLightScreen, #Toggle Color/Softlight/Screen paint mode (shift D) PAINT
ToggleAlphaMode, #Toggle AddAlpha/EraseAlpha paint mode (A) PAINT
ToggleImagePaint, #Cyclic image/paint/mask mode (B) IMAGE_EDITOR
InitPaintBlend, #Reinit mix paint mode (Alt D) PAINT
SelectVertgroup, #Select active vertex group in Texture Paint mode
DeselectVertgroup, #Deselect active vertex group in Texture paint mode
           TexturePaintPlusProps] #all of the add-on's properties
def menu_func(self, context): #Add to UI>Mesh menu [EDIT MODE] => checkbox "Automerge uv"
layout = self.layout
wm = context.window_manager
AME = "tpp_automergeuv" in wm
Icon = 'CHECKBOX_HLT' if AME else 'CHECKBOX_DEHLT'
layout.operator("paint.auto_merge_uv", icon = Icon)
def menu_mesh_select_mode(self, context): #Add to Selection mode Menu (ctrl Tab) [EDIT MODE] => multi-selections
layout = self.layout
layout.separator()
prop = layout.operator("wm.context_set_value", text="Vertex + Edge", icon='EDITMODE_HLT')
prop.value = "(True, True, False)"
prop.data_path = "tool_settings.mesh_select_mode"
prop = layout.operator("wm.context_set_value", text="Vertex + Face", icon='ORTHO')
prop.value = "(True, False, True)"
prop.data_path = "tool_settings.mesh_select_mode"
prop = layout.operator("wm.context_set_value", text="Edge + Face", icon='SNAP_FACE')
prop.value = "(False, True, True)"
prop.data_path = "tool_settings.mesh_select_mode"
layout.separator()
prop = layout.operator("wm.context_set_value", text="All", icon='OBJECT_DATAMODE')
prop.value = "(True, True, True)"
prop.data_path = "tool_settings.mesh_select_mode"
def menu_snap(self, context): #Add to Snap menu (Shift S)[OBJECT MODE] => object origins changes
layout = self.layout
layout.separator()
layout.operator("object.origin_set", text="Geometry to Origin")
layout.operator("object.origin_set", text="Origin to Geometry").type = 'ORIGIN_GEOMETRY'
layout.operator("object.origin_set", text="Origin to 3D Cursor").type = 'ORIGIN_CURSOR'
def register():
import bpy
# register classes
init_props()
for c in classes:
bpy.utils.register_class(c)
bpy.types.WindowManager.tpp = bpy.props.PointerProperty(\
type = TexturePaintPlusProps)
# add ImagePaint keymap entries
km = bpy.context.window_manager.keyconfigs.default.keymaps['Image Paint']
kmi = km.keymap_items.new("paint.toggle_alpha_mode", 'A', 'PRESS') #ok
kmi = km.keymap_items.new("wm.context_toggle", 'B', 'PRESS')
kmi.properties.data_path = "user_preferences.system.use_mipmaps"
kmi = km.keymap_items.new("paint.toggle_add_multiply", 'D', 'PRESS')#ok
kmi = km.keymap_items.new("paint.toggle_color_soft_light_screen", 'D', 'PRESS', shift=True)#ok
kmi = km.keymap_items.new("paint.init_blend_mode", 'D', 'PRESS', alt=True)#ok
kmi = km.keymap_items.new("paint.sample_color_custom", 'RIGHTMOUSE', 'PRESS', oskey=True)
kmi = km.keymap_items.new("paint.grid_texture", 'G', 'PRESS')
kmi = km.keymap_items.new("paint.save_image", 'S', 'PRESS', alt=True) #?
kmi = km.keymap_items.new("view3d.brush_popup", 'W', 'PRESS')#ok
kmi = km.keymap_items.new("view3d.texture_popup", 'W', 'PRESS', alt=True)#ok
kmi = km.keymap_items.new("slots.projectpaint", 'W', 'PRESS', shift=True)#ok
# add 3DView keymap entries
km = bpy.context.window_manager.keyconfigs.default.keymaps['3D View']
kmi = km.keymap_items.new("object.default_material", 'X', 'PRESS', alt=True, ctrl=True)
kmi = km.keymap_items.new("object.add_default_image", 'X', 'PRESS', shift=True, alt=True) #ok object.add_default_image
# deactivate to prevent clashing------------------------------------
km = bpy.context.window_manager.keyconfigs.default.keymaps['Window']
for kmi in km.keymap_items:
if kmi.type == 'S' and not kmi.any and not kmi.shift and kmi.ctrl and kmi.alt and not kmi.oskey:
kmi.active = False
# add Window keymap entry
km = bpy.context.window_manager.keyconfigs.default.keymaps['Window']
kmi = km.keymap_items.new("wm.mass_link_append", 'F1', 'PRESS', ctrl=True)#ok
kmi = km.keymap_items.new("paint.reload_images", 'R', 'PRESS', alt=True, ctrl=True)#ok
kmi = km.keymap_items.new("image.save_dirty", 'S','PRESS', alt=True, ctrl=True)#ok
# deactivate and remap to prevent clashing -------------------------
if bpy.context.user_preferences.inputs.select_mouse == 'RIGHT':
right_mouse = ['RIGHTMOUSE', 'SELECTIONMOUSE']
else: #'LEFT'
right_mouse = ['RIGHTMOUSE', 'ACTIONMOUSE']
km = bpy.context.window_manager.keyconfigs.default.keymaps['3D View']
for kmi in km.keymap_items:
if kmi.type in right_mouse and kmi.alt and not kmi.ctrl and not kmi.shift:
# deactivate
kmi.active = False
for kmi in km.keymap_items:
if kmi.type in right_mouse and not kmi.alt and not kmi.ctrl and not kmi.shift:
# remap
kmi.alt = True
# add menu entries
bpy.types.VIEW3D_MT_edit_mesh.prepend(menu_func)
bpy.types.VIEW3D_MT_edit_mesh_select_mode.append(menu_mesh_select_mode)
bpy.types.VIEW3D_MT_snap.append(menu_snap)
def unregister():
# menu entries
bpy.types.VIEW3D_MT_snap.remove(menu_snap)
bpy.types.VIEW3D_MT_edit_mesh_select_mode.remove(menu_mesh_select_mode)
bpy.types.VIEW3D_MT_edit_mesh.remove(menu_func)
# ImagePaint keymap entries
km = bpy.context.window_manager.keyconfigs.default.keymaps['Image Paint']
for kmi in km.keymap_items:
if kmi.idname in ["view3d.brush_popup", "view3d.texture_popup", "paint.toggle_alpha_mode", "paint.sample_color_custom",
"paint.toggle_add_multiply", "paint.toggle_color_soft_light_screen", "paint.init_blend_mode", "paint.grid_texture", "paint.reload_image", "paint.save_image"]:
km.keymap_items.remove(kmi)
elif kmi.idname == "wm.context_toggle":
if getattr(kmi.properties, "data_path", False) in [ "active_object.show_wire", "user_preferences.system.use_mipmaps"]:
km.keymap_items.remove(kmi)
elif kmi.idname == "wm.context_set_enum":
if getattr(kmi.properties, "data_path", False) in ["tool_settings.image_paint.brush.blend"]:
km.keymap_items.remove(kmi)
# 3DView keymap entry
km = bpy.context.window_manager.keyconfigs.default.keymaps['3D View']
for kmi in km.keymap_items:
if kmi.idname in ["object.add_default_image", "object.default_material"]:
km.keymap_items.remove(kmi)
# remap and reactivate original items
if bpy.context.user_preferences.inputs.select_mouse == 'RIGHT':
right_mouse = ['RIGHTMOUSE', 'SELECTIONMOUSE']
else: #'LEFT'
right_mouse = ['RIGHTMOUSE', 'ACTIONMOUSE']
km = bpy.context.window_manager.keyconfigs.default.keymaps['3D View']
for kmi in km.keymap_items:
if kmi.type in right_mouse and kmi.alt and not kmi.ctrl and not kmi.shift:
if kmi.active:
# remap
kmi.alt = False
else:
# reactivate
kmi.active = True
# reactive original item
km = bpy.context.window_manager.keyconfigs.default.keymaps['Window']
for kmi in km.keymap_items:
if kmi.type == 'S' and not kmi.any and not kmi.shift and kmi.ctrl and kmi.alt and not kmi.oskey:
kmi.active = True
# unregister classes
remove_props()
for c in classes:
bpy.utils.unregister_class(c)
try:
del bpy.types.WindowManager.tpp
except:
pass
if __name__ == "__main__":
register()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2014, 2015 Patrick Moran for Verizon
#
# Distributed WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License. If not, see <http://www.gnu.org/licenses/>.
from collections import deque
import g_config
import g_eon_api_bridge
# from g_graphics import plot_assets
import time
import logging
import json
from g_lat_lon_distance import lat_lon_distance, move_to_lat_lon, compute_resolution
from sortedcontainers import SortedDict
import pickle
import copy
import pandas
from numpy import int64, fmax, argsort, array, interp, linspace, diff, random
import arrow
import Queue
import os
import threading
ON = 1
OFF = 0
class GroomingMessageHandler(threading.Thread):
def __init__(self,
incoming_q,
incoming_queue_lock,
outgoing_q,
outgoing_queue_lock,
module_instance_name='Unnamed',
shared_data=None, shared_data_lock=None):
self.incoming_rabbit_mq = incoming_q
self.incoming_queue_lock = incoming_queue_lock
self.outgoing_q = outgoing_q
self.outgoing_queue_lock = outgoing_queue_lock
self.my_local_logger = logging.getLogger(module_instance_name)
self.my_local_logger.setLevel(logging.DEBUG)
self.local_q = deque()
self.eon_api_bridge = g_eon_api_bridge.EonApiBridge()
self.handle_queue = False
self.instance_name = module_instance_name
# This is used to run the main loop
self.run_enable = True
self.shared_data = shared_data
self.shared_data_lock = shared_data_lock
self.start_time = 0
self.run_start_time = time.time()
self.groomer_state = "0:IDLE" # Used to determine the current state of this thread in a multi-threaded env
self.groom_run_state = "0:IDLE" # Used to determine the current run mode of this thread
self.idle_count = 0
self.end_time = 0
self.query_count = 0
self.asset_dictionary = {}
self.working_radius = g_config.START_RADIUS # This will hold the radius units 0.12
self.cell_collection_set = set()
self.resolution = compute_resolution(self.working_radius)
self.cell_count = 0
self.utility_region = g_config.UTILITY_REGION
self.ttl = g_config.TTL_MAX
self.SHOW_PLOTS = False
self.cell_time_event = False
threading.Thread.__init__(self)
@staticmethod
def check_message_payload(dequeued_item):
"""
This method checks that the message payload keys matches the required (specified) keys
        :return: False if any key is missing, otherwise True
"""
key_array = ["dateTime",
"payload",
"messageType"]
        # Note that the "ttl" key (and others) may be present but it's not checked here!
for key in key_array:
if key not in dequeued_item.keys():
return False
key_array = ["zoomR",
"spatial",
"circuitID",
"reputationEnabled",
"assetID",
"temporal",
"outageTime",
"company",
"votes",
"zoomT",
"longitude",
"latitude"]
for key in key_array:
if key not in dequeued_item["payload"].keys():
return False
return True
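    # Illustrative (hypothetical values): the smallest message that passes
    # check_message_payload() carries the three top-level keys plus the twelve
    # payload keys listed above, e.g.:
    #   msg = {"dateTime": 1430452800000,
    #          "messageType": "Query",
    #          "payload": {"latitude": 41.2693778, "longitude": -73.8773389,
    #                      "company": "CEDRAFT", "outageTime": 1430452800000,
    #                      "circuitID": "", "assetID": "", "votes": 3,
    #                      "spatial": '{"r":[1,1]}', "temporal": "",
    #                      "reputationEnabled": True, "zoomT": 1, "zoomR": 1}}
    #   assert GroomingMessageHandler.check_message_payload(msg)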
def process_incoming_rabbit_mq(self):
"""
        Processes the Rabbit MQ bus messages and handles each one depending on its type.
        If the type is Query, the message is put on the local queue for processing later.
"""
self.groomer_state = "3:PROCESS QUEUE"
lock_counter = 0
while not self.incoming_queue_lock.acquire(False):
self.my_local_logger.debug("Trying to acquire lock. Sleeping 0.05s.")
time.sleep(g_config.SLEEP_TIME)
lock_counter += 1
if lock_counter > 100:
self.my_local_logger.debug("Cant acquire incoming queue lock, returning")
self.my_local_logger.error("Unable to acquire lock in process_incoming_queue, returning!")
self.groomer_state = "4:PROCESS QUEUE LOCK ERROR"
return
while not self.incoming_rabbit_mq.empty():
self.my_local_logger.debug(
"Groomer says Incoming Rabbit MQ not empty, length is %d" % self.incoming_rabbit_mq.qsize())
self.my_local_logger.debug("Acquired lock")
# This is where the incoming grooming message is pulled off the Rabbit MQ.
dequeued_item = self.incoming_rabbit_mq.get()
if self.check_message_payload(dequeued_item):
self.my_local_logger.info("A %s type message was dequeued " %
dequeued_item['messageType'])
else:
self.my_local_logger.error("Message payload is malformed in process_incoming_queue, returning")
if self.incoming_queue_lock:
self.incoming_queue_lock.release()
self.my_local_logger.debug("GROOMER rabbit MQ lock was released")
self.my_local_logger.info("The rabbit MQ lock was released")
self.groomer_state = "5:PROCESS QUEUE MALFORMED"
return
            # Determine what the queue command type is and dispatch it.
if dequeued_item['messageType'] == 'Test':
# This is a dummy Test which is dropped for now.
pass
elif dequeued_item['messageType'] == 'Clear':
# Restore the previous results
pass
elif dequeued_item['messageType'] == 'Save':
# Save the current groom (filter) settings and kick off a new Utility wide groom process
# Grab the Query message type and stuff it in a local fifo queue
self.my_local_logger.debug("Save type message received")
self.my_local_logger.debug("query_guid = %s" % "None - missing on save") # dequeued_item['queryGuid'])
#######################################################
# Collect interesting payload information here
#######################################################
if "ttl" not in dequeued_item.keys():
dequeued_item["ttl"] = g_config.TTL_UTILITY_SPAN
self.local_q.append(dequeued_item)
self.my_local_logger.debug("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
self.my_local_logger.info("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
pass
elif dequeued_item['messageType'] == 'Query':
# Grab the Query message type and stuff it in a local fifo queue
self.my_local_logger.debug("Query type message received")
self.my_local_logger.debug("query_guid = %s" % dequeued_item['queryGuid'])
#######################################################
# Collect interesting payload information here
#######################################################
if "ttl" not in dequeued_item.keys():
dequeued_item["ttl"] = g_config.TTL_MAX
self.local_q.append(dequeued_item)
self.my_local_logger.debug("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
self.my_local_logger.info("Message queued to the local incoming queue (len=%d)" % len(self.local_q))
else:
self.my_local_logger.error("incoming_rabbit_mq TYPE is a UNKNOWN")
if self.incoming_queue_lock:
self.incoming_queue_lock.release()
self.my_local_logger.debug("GROOMER rabbit MQ lock was released")
self.my_local_logger.info("The rabbit MQ lock was released")
self.my_local_logger.debug("process_incoming_rabbit_mq finished")
self.groomer_state = "0:IDLE"
def get_data_in_cell_area(self, cell_parameters, ttl):
"""
Ask the EON API for onts, circuits and transformers for a given lat, lon and radius
Returns a group of items that are inside the circle with a given center (lat, lon) and
radius.
Note: convert the time units in the ONT event list into minutes by dividing by 60000
:param cell_parameters: Latitude
:param ttl: The time to live.
:return: this_cell # A hexagonal cell dictionary
this_cell = {'neighbors': [], # the 6 nearest neighbor cells
'assets': {}, # The utility assets including their lat and lon and events
'onts': {}, # Verizon's ONTs including their lat and lon and events
'state': '' # A string representing the state of this cell.
This is used for multi threading purposes so that neighboring cells can see
whats going on.
'circuits': {} # This is a set of circuits in this cell. All assets on a circuit
are in the circuits list
'lat_lon': [] # The lat and lon array of the center of the cell
'radius': 1.00 # The radius of the circumscribed cell.
ont_items is a dictionary of {'lat_lon':[],'assets':[],'events':[]}
asset_items is a dictionary of {'lat_lon':[],'onts':[],'events':[]}
circuit_items is a dictionary of {'connected_items' , asset_item_key}
where asset_item_key is a key entry in the asset_item dictionary
events is an array of 2 sets of events. events[0] is the "fail_time" and events[1] is the "restore_time"
        A call to the API is done in a loop to gather all items; here is a test of the API call:
The swagger test example is
http://10.123.0.27:8080/eon360/api/query
With a json payload of
{
"itemType":"ALL",
"circle": {
"unit": "MILES",
"longitude": -73.8773389,
"radius": 1.0,
"latitude": 41.2693778
},
"pageParameter": {
"page": 0,
"size": 100
}
}
This will return a data structure like this
dd['eligibility']['dataItems']
dd['alarm']['dataItems']
dd['utility']['dataItems']
"""
# query_guid = payload["query_guid"]
this_lat = cell_parameters["latitude"]
this_lon = cell_parameters["longitude"]
# utility = cell_parameters["company"]
groom_time = cell_parameters["outageTime"]
# circuit_id = cell_parameters["circuitID"]
# asset_id = cell_parameters["assetID"]
# votes = cell_parameters["votes"]
# spatial = cell_parameters["spatial"]
# temporal = cell_parameters["temporal"]
# reputation_ena = cell_parameters["reputationEnabled"]
# zoom_t = cell_parameters["zoomT"]
# zoom_r = cell_parameters["zoomR"]
this_radius = cell_parameters["radius"]
# units = cell_parameters["units"]
query_type = "ALL"
ont_serial_number_set = set()
ont_items = {}
asset_serial_number_set = set()
asset_items = {}
circuit_serial_number_set = set()
circuit_items = {}
# The six neighbor cells are initially set to be empty
# This a string quid and an angle (in degrees)
neighbor_array = [["", 0], ["", 60], ["", 120], ["", 180], ["", 240], ["", 300]]
this_cell = {'neighbors': neighbor_array,
'assets': {},
'onts': {},
'circuits': {},
'state': 'creating',
'lat_lon': [this_lat, this_lon],
'radius': this_radius,
'groom_time': groom_time,
'ttl': 0
}
page_number = 0
page_size = 20
query_parameter = json.dumps({"itemType": query_type,
"circle": {"longitude": this_lon,
"latitude": this_lat,
"radius": this_radius, "unit": g_config.RADIUS_UNITS},
"pageParameter": {"page": page_number, "size": page_size}})
self.my_local_logger.debug("Formed query parameter: %s" % query_parameter)
dd = self.eon_api_bridge.query_post_eon_data_30(query_parameter=query_parameter)
more_pages = True
# Loop here until no more utility components of the first collection are found
while more_pages and dd is not None:
# This is the ONTs loop through them and find all the ONTs in the area
for this_ont in dd['eligibility']['dataItems']:
ont_dictionary_keyword = this_ont['ontSerialNumber']
ont_serial_number_set.add(ont_dictionary_keyword)
if ont_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping this ont in eligibility list")
continue
ont_items[ont_dictionary_keyword] = {'lat_lon': [this_ont['latitude'], this_ont['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[ont_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[ont_dictionary_keyword]['assets'] = set()
for this_alarm in dd['alarm']['dataItems']:
alarm_dictionary_keyword = this_alarm['ontSerialNumber']
if alarm_dictionary_keyword not in ont_serial_number_set:
if alarm_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping this ONT in the alarm list")
continue
ont_serial_number_set.add(alarm_dictionary_keyword)
ont_items[alarm_dictionary_keyword] = {'lat_lon': [this_alarm['latitude'], this_alarm['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[alarm_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[alarm_dictionary_keyword]['assets'] = set()
if this_alarm['alarmReceiveTime']:
alarm_set = float(this_alarm['alarmReceiveTime']) # * 1e-3) / 60
ont_items[alarm_dictionary_keyword]['events'][0].add(alarm_set)
if this_alarm['alarmClearTime']:
alarm_clear = float(this_alarm['alarmClearTime']) # * 1e-3) / 60
ont_items[alarm_dictionary_keyword]['events'][1].add(alarm_clear)
# Now go through the assets and associate the assets to the ONTs and the ONTs to the assets
for this_item in dd['utility']['dataItems']:
asset_dictionary_keyword = this_item['transformerID']
if asset_dictionary_keyword not in asset_serial_number_set:
asset_serial_number_set.add(asset_dictionary_keyword)
asset_items[asset_dictionary_keyword] = {'lat_lon': [this_item['latitude'], this_item['longitude']]}
asset_items[asset_dictionary_keyword]['events'] = [set(), set()]
asset_items[asset_dictionary_keyword]['onts'] = set()
asset_items[asset_dictionary_keyword]['guid'] = this_item['guid']
asset_items[asset_dictionary_keyword]['serviceAddress'] = this_item['serviceAddress']
for this_ont in this_item['eligibilityList']:
ont_dictionary_keyword = this_ont['ontSerialNumber']
if ont_dictionary_keyword not in ont_serial_number_set:
ont_serial_number_set.add(ont_dictionary_keyword)
ont_items[ont_dictionary_keyword] = {
'lat_lon': [this_ont['latitude'], this_ont['longitude']]}
alarm_set_time = set()
alarm_clear_time = set()
ont_items[ont_dictionary_keyword]['events'] = [alarm_set_time, alarm_clear_time]
ont_items[ont_dictionary_keyword]['assets'] = set()
# Skip the ONTs that don't have an installation.
if ont_dictionary_keyword == "[PENDING INSTALL]":
self.my_local_logger.debug("skipping the ONT listed on eligibility list in asset_id=%s" %
asset_dictionary_keyword)
self.my_local_logger.info("Skipping %s because it's status is PENDING INSTALL" %
asset_dictionary_keyword)
continue
# Stitch up the assets in the onts
ont_items[ont_dictionary_keyword]['assets'].add(asset_dictionary_keyword)
# Stitch up the onts in the assets
asset_items[asset_dictionary_keyword]['onts'].add(ont_dictionary_keyword)
circuit_dictionary_keyword = this_item['circuitID']
if circuit_dictionary_keyword not in circuit_serial_number_set:
                    # add the circuit item to the circuit_serial_number_set if needed
circuit_serial_number_set.add(circuit_dictionary_keyword)
# and create an empty set
circuit_items[circuit_dictionary_keyword] = {'connected_items': set()}
# Now add the data structure to the set
circuit_items[circuit_dictionary_keyword]['connected_items'].add(asset_dictionary_keyword)
###########################
# Look for the next page #
###########################
if (dd['utility']['pageTotalItems'] == page_size) or \
(dd['alarm']['pageTotalItems'] == page_size) or \
(dd['eligibility']['pageTotalItems'] == page_size):
self.my_local_logger.debug("Collecting next page for this message")
page_number += 1
more_pages = True
query_parameter = json.dumps({"itemType": query_type,
"circle": {"longitude": this_lon,
"latitude": this_lat,
"radius": this_radius,
"unit": g_config.RADIUS_UNITS},
"pageParameter": {"page": page_number, "size": page_size}})
dd = self.eon_api_bridge.query_post_eon_data_30(query_parameter=query_parameter)
else:
more_pages = False
this_cell['assets'] = asset_items
        # Go over the ONT set and see if there are any that don't have alarms. This might happen if there were no
        # alarms posted to this ONT because the main alarm ingestion loop failed for some reason. There will still be
        # alarms that are posted on the ONTs and those can be recovered here.
for this_ont in ont_items:
if len(ont_items[this_ont]['events'][0]) == 0 or len(ont_items[this_ont]['events'][1]) == 0:
# To find any ONTs that don't seem to have alarms make this call:
# where ONT_SERIAL_NUMBER is 00ABB96 in this example.
# http://10.123.0.27:8080/eon360/api/alarms?sortBy=alarmReceiveTime&ontSerialNumber=000ABB96&p=0&s=20
dd = self.eon_api_bridge.alarm_get_pons_nms_00(ont_serial_number=this_ont)
if dd:
if 'alarms' in dd.keys():
for this_alarm in dd['alarms']:
if this_alarm['alarmReceiveTime']:
alarm_set = float(this_alarm['alarmReceiveTime']) # * 1e-3) / 60
ont_items[this_ont]['events'][0].add(alarm_set)
self.my_local_logger.info("Adding an AlarmReceiveTime to the data")
if this_alarm['alarmClearTime']:
alarm_clear = float(this_alarm['alarmClearTime']) # * 1e-3) / 60
ont_items[this_ont]['events'][1].add(alarm_clear)
else:
self.my_local_logger.warning("No alarms found in call to alarm_get_pons_nms_00(ont_serial_number=%s)" % this_ont )
else:
self.my_local_logger.warning("Nothing returned from the API call")
this_cell['onts'] = ont_items
this_cell['circuits'] = circuit_items
this_cell['state'] = 'populated'
this_cell['ttl'] = ttl
self.my_local_logger.info("This CELL (radius= %3.3f %s @ lat=%f, lon=%f) has %d circuits, %d assets and %d onts." %
(this_radius, g_config.RADIUS_UNITS, this_lat, this_lon,
len(circuit_items), len(asset_items), len(ont_items))
)
# Note convert the time units into minutes by dividing by 60000
return this_cell
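    # Usage sketch (hypothetical coordinates): fetch everything inside one
    # hexagonal cell and persist it for later grooming runs.
    #   params = {"latitude": 41.2693778, "longitude": -73.8773389,
    #             "radius": 0.12, "outageTime": 1430452800000}
    #   cell = self.get_data_in_cell_area(params, ttl=g_config.TTL_MAX)
    #   self.persist_cell_pickle(cell)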
@staticmethod
def persist_cell_pickle(cell, filename=""):
"""
        :param cell: The cell structure that is persisted to disk
        :param filename: Optional file name; when empty it is derived from the cell centre
        :return:
"""
this_lat = cell['lat_lon'][0]
this_lon = cell['lat_lon'][1]
if this_lat < 0:
lat_str = ("%03.2f" % (float(round(-this_lat * 100)) / 100.0)).replace('.', 'm')
else:
lat_str = ("%03.2f" % (float(round(this_lat * 100)) / 100.0)).replace('.', 'p')
if this_lon < 0:
lon_str = ("%03.2f" % (float(round(-this_lon * 100)) / 100.0)).replace('.', 'm')
else:
lon_str = ("%03.2f" % (float(round(this_lon * 100)) / 100.0)).replace('.', 'p')
if filename == "":
filename = 'cell_' + lat_str + '_' + lon_str
filename += '.pck'
full_path = g_config.BASE_DIR + os.sep + g_config.PICKLES + os.sep + filename
        with open(full_path, "wb") as f:  # binary write mode for pickle
            pickle.dump(cell, f)
@staticmethod
def un_persist_cell_pickle(this_lat, this_lon):
"""
        :param this_lat: Latitude of the cell centre
        :param this_lon: Longitude of the cell centre
        :return: cell
"""
if this_lat < 0:
lat_str = ("%03.2f" % (float(round(-this_lat * 100)) / 100.0)).replace('.', 'm')
else:
lat_str = ("%03.2f" % (float(round(this_lat * 100)) / 100.0)).replace('.', 'p')
if this_lon < 0:
lon_str = ("%03.2f" % (float(round(-this_lon * 100)) / 100.0)).replace('.', 'm')
else:
lon_str = ("%03.2f" % (float(round(this_lon * 100)) / 100.0)).replace('.', 'p')
        filename = 'cell_' + lat_str + '_' + lon_str + '.pck'
        full_path = g_config.BASE_DIR + os.sep + g_config.PICKLES + os.sep + filename
        with open(full_path, "rb") as f:  # read back from where persist_cell_pickle() wrote
            cell = pickle.load(f)
        return cell
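    # Round-trip sketch: the file name is derived from the cell centre, so the
    # same lat/lon must be passed to read a cell back. A cell centred at
    # (41.27, -73.88) is written as "cell_41p27_73m88.pck" (the decimal point
    # becomes 'p' for positive values and 'm' for negative ones):
    #   GroomingMessageHandler.persist_cell_pickle(cell)
    #   cell = GroomingMessageHandler.un_persist_cell_pickle(41.27, -73.88)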
def temporal_filter(self, cell):
"""
        :param cell: A populated cell dictionary (see get_data_in_cell_area).
        This method runs the filter model over each ONT and returns a filtered outage based on the
        alarm_condition (a value between 0 and 1).
        Start with alarm_condition = 0, which means no alarm (these are alarm_conditions for ALARMs).
        EPOCH timestamps can be converted back and forth to dates (see the pandas.to_datetime calls below).
        In this context ON means power is ON, OFF means power is OFF.
        t is in milliseconds; to convert to minutes divide by 1000 and then by 60.
        :return: The cell, with a 'temporal_filter' entry added to each ONT.
"""
self.cell_time_event = False
for this_ont in cell['onts']:
event_vector = {'t': [int64(g_config.ENGINE_BEGIN_TIME)], 'a': [ON]}
on_times = cell['onts'][this_ont]['events'][ON]
off_times = cell['onts'][this_ont]['events'][OFF]
if len(on_times) > 0:
for this_alarm in on_times:
event_vector['t'].append(this_alarm)
event_vector['a'].append(ON)
if len(off_times) > 0:
for this_alarm in off_times:
event_vector['t'].append(this_alarm)
event_vector['a'].append(OFF)
            # At this point we have a temporal vector of events for this ONT.
time_vector = array(event_vector['t'])
ind = argsort(time_vector)
power_state = array(event_vector['a'])[ind]
t = time_vector[ind]
# At this point the sorted time and alarm vectors are ready
# tw = t[t > t[-1] - config.ALARM_DETECT_WINDOW * 1000]
# aw = a[t > t[-1] - config.ALARM_DETECT_WINDOW * 1000]
            # Deglitch the vectors now.
            # To deglitch the time vector, take all the values that are ON, extend them by 5 minutes,
            # then add (or) them back into the time vector.
# time_of_alarm_condition = tw[-1] # The last time vector point (the sorted value)
# alarm_condition = aw[-1]
time_count = len(t)
deglitched_power_state = copy.copy(power_state)
# see for example http://pandas.pydata.org/pandas-docs/stable/timeseries.html
for i in range(time_count - 1):
if power_state[i] == OFF and power_state[i + 1] == ON:
if t[i + 1] < t[i] + g_config.DEGLITCH_TIME:
self.my_local_logger.debug(
"Deglitched the power at %s" % (pandas.to_datetime(t[i], unit='ms')))
deglitched_power_state[i] = ON
else:
self.my_local_logger.debug("off time is %f min (%f hours) (days %f)" % (
(t[i + 1] - t[i]) / 1000 / 60, (t[i + 1] - t[i]) / 1000 / 60 / 60,
(t[i + 1] - t[i]) / 1000 / 60 / 60 / 24))
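            # Worked example of the deglitch rule above (illustrative numbers):
            # with DEGLITCH_TIME equivalent to 5 minutes, an OFF at time t
            # followed by an ON 2.5 minutes later is treated as a glitch and
            # the OFF sample is rewritten to ON; an ON arriving an hour later
            # leaves the OFF in place and it is logged as a real outage above.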
power_state_array = []
time_array = []
for i in range(time_count-1):
time_array.append(t[i])
time_array.append(t[i+1] - g_config.MS_TIME_RESOLUTION) # something around 5 seconds
power_state_array.append(deglitched_power_state[i])
power_state_array.append(deglitched_power_state[i])
if deglitched_power_state[i] == ON:
self.my_local_logger.debug("power on at %s" % (pandas.to_datetime(t[i], unit='ms')))
if deglitched_power_state[i] == OFF:
self.my_local_logger.debug("power off at %s" % (pandas.to_datetime(t[i], unit='ms')))
time_array.append(t[-1])
power_state_array.append(deglitched_power_state[-1])
sample_time = cell['groom_time']
if sample_time > t[-1]:
self.my_local_logger.debug(
"sample time is after the end of time in the time event list, using interpolated value")
time_array.append(sample_time - g_config.MS_TIME_RESOLUTION)
power_state_array.append(deglitched_power_state[-1])
time_array_sec = [round(x / 1000) for x in time_array]
# time_domain_vector = [time_array, power_state_array] # column_stack((time_array,power_state_array))
            # Calculate a +/- 1 week interval every 5 minutes around the groom time, unless the groom time is
            # close to the current time, in which case the preceding 2 weeks are used to build the time vector.
            # This is done to allow the real time groomer to run a bit faster than the interactive groomer during the
            # interp call.
# The arrow library produces timestamp values in seconds.
current_time = arrow.utcnow().to('US/Eastern')
a_week_ago = current_time.replace(weeks=-1)
sample_time_arrow = arrow.get(sample_time/1000)
if sample_time_arrow.timestamp < a_week_ago.timestamp:
# This is a grooming operation that fits in the 2 week span of time.
start_time = sample_time_arrow.replace(weeks=-1)
stop_time = sample_time_arrow.replace(weeks=1)
else:
start_time = sample_time_arrow.replace(weeks=-2)
stop_time = sample_time_arrow
# The time vector will be in seconds
# One minute = 60
# One hour = 60*60
# One day = 24*60*60
# One week = 7*24*60*60
# Five minute intervals are 5*60
delta_time = 5*60 # This is the sample interval of the time vector (Every 5 minutes)
number_of_points = (stop_time.timestamp - start_time.timestamp) / delta_time
sample_time_array = linspace(start_time.timestamp, stop_time.timestamp, number_of_points)
sample_power_array = interp(sample_time_array, time_array_sec, power_state_array)
time_domain_vector = [sample_time_array, sample_power_array]
reliability = sum(sample_power_array)/len(sample_power_array)
event_durations = []
event_times = []
if sample_power_array.min() == sample_power_array.max():
self.SHOW_PLOTS = False
else:
self.SHOW_PLOTS = True
if self.SHOW_PLOTS:
if not g_config.IS_DEPLOYED:
print "Reliability = %4.4f" % reliability
if reliability > 0.8:
self.cell_time_event = True
if not g_config.IS_DEPLOYED:
try:
import matplotlib.pyplot as plt
# plt.plot(time_array, power_state_array, 'o')
plt.plot(sample_time_array, sample_power_array, '-x')
plt.show(block=False)
except:
print "Something went wrong with the matplotlib command, skipping!"
if (sample_power_array[0] > 0) and (sample_power_array[-1] > 0):
if not g_config.IS_DEPLOYED:
print "Diff the time vector to find the on and off times."
diff_sample_power_array = diff(sample_power_array)
index_on = diff_sample_power_array > 0
on_times = sample_time_array[index_on]
index_off = diff_sample_power_array < 0
off_times = sample_time_array[index_off]
if len(on_times) == len(off_times):
for k, t_off in enumerate(off_times):
                                # The outage duration is the time the power came back on minus the time it went off.
power_fail_event_duration = on_times[k] - t_off
if not g_config.IS_DEPLOYED:
print "power fail event duration = %f" % power_fail_event_duration
event_durations.append(power_fail_event_duration)
event_times.append(t_off)
if not g_config.IS_DEPLOYED:
print "Found a %10.2f minute outage on %s" % (
(power_fail_event_duration/60),
arrow.get(t_off).format("MMMM DD, YYYY @ hh:mm A")
)
else:
self.my_local_logger.info('Power event edges are mismatched, skipping this: ')
else:
self.my_local_logger.info('Power event edges in the window are mismatched, skipping this: ')
else:
self.my_local_logger.info('Power event outage has low reliability, skipping this: ')
self.my_local_logger.info('temporal data for cell has %d points from %s to %s' % (
number_of_points, start_time, stop_time))
cell['onts'][this_ont]['temporal_filter'] = {'reliability': reliability,
'event_durations': event_durations,
'event_times': event_times,
'time_domain_vector': time_domain_vector}
return cell
def spatial_filter(self, cell):
"""
        The spatial filter filters the ONT event collection around each asset in the cell.
:param cell:
            A cell that contains ONTs along with their locations and states.
The onts values must have been filtered temporally first.
:return:
"""
if self.cell_time_event:
# Only append outages on assets for the cells that have events
if not g_config.IS_DEPLOYED:
print "An interesting time event has occurred in this cell..."
for this_ont in cell['onts']:
event_durations = cell['onts'][this_ont]['temporal_filter']['event_durations']
event_times = cell['onts'][this_ont]['temporal_filter']['event_times']
if not g_config.IS_DEPLOYED:
if this_ont == "0016FE13":
print "found an event"
for this_asset in cell['onts'][this_ont]['assets']:
if not g_config.IS_DEPLOYED:
if this_asset == "TR1000489404_108":
print "found a matching asset"
try:
event_activities = cell['assets'][this_asset]['spatial_filter']
except KeyError:
event_activities = {'distance': [], 'events': []}
if len(event_durations) > 0:
ont_lat = cell['onts'][this_ont]['lat_lon'][0]
ont_lon = cell['onts'][this_ont]['lat_lon'][1]
lat_lon = cell['assets'][this_asset]['lat_lon']
asset_lat = lat_lon[0]
asset_lon = lat_lon[1]
this_distance = lat_lon_distance(asset_lat, asset_lon, ont_lat, ont_lon, units='mi')
event_activities['distance'].append(this_distance)
event_activities['events'].append(
{'event_durations': event_durations, 'event_times': event_times}
)
cell['assets'][this_asset]['spatial_filter'] = event_activities
if not g_config.IS_DEPLOYED:
print " ...done with interesting cell."
return cell
def vote_on_assets(self, cell, temporal_data, spatial_data, voting_data):
"""
        :param cell: A populated cell dictionary
        :param temporal_data: temporal filter settings from the payload (currently only logged)
        :param spatial_data: JSON string of spatial weights, e.g. '{"r":[1,1]}'
        :param voting_data: an integer that is the number of votes to use
:return:
"""
try:
this_filter = json.loads(spatial_data)
total_counts = len(this_filter['r'])
weights = []
for i in range(total_counts):
weights.append(this_filter['r'][i])
except TypeError as e:
self.my_local_logger.error('Spatial data has a Type Error: %s, %s' % (spatial_data, e))
except ValueError as e:
self.my_local_logger.error('Spatial data has a ValueError: %s, %s' % (spatial_data, e))
self.my_local_logger.info('spatial data = %s', spatial_data)
self.my_local_logger.info('temporal data = %s', temporal_data)
if voting_data:
try:
number_of_votes = int(voting_data)
except ValueError as e:
                self.my_local_logger.error('Voting data has an error in the passed value %s' % e)
number_of_votes = 1
except TypeError as e:
self.my_local_logger.error('Voting data is not a string %s' % e)
number_of_votes = 1
else:
number_of_votes = 1
self.my_local_logger.info('Number of votes passed: %d' % number_of_votes)
for this_asset in cell['assets']:
cell['assets'][this_asset]['outage_events'] = None
try:
# these_distances = cell['assets'][this_asset]['spatial_filter']['distance']
these_events = cell['assets'][this_asset]['spatial_filter']['events']
except KeyError:
# print "No outages on this asset"
continue
if len(these_events) > 0:
                if len(these_events) >= 1:  # placeholder threshold; likely intended to be >= number_of_votes
# This is where the filter will take place.
# These events is an array.
# I must iterate over an array of these event items
try:
outage_events = cell['assets'][this_asset]['outage_events']
except KeyError:
outage_events = {'event_durations': [], 'event_times': []}
if outage_events is None:
outage_events = {'event_durations': [], 'event_times': []}
for this_event_dict in these_events:
for j, this_event in enumerate(this_event_dict['event_durations']):
outage_events['event_durations'].append(this_event)
outage_events['event_times'].append(this_event_dict['event_times'][j])
cell['assets'][this_asset]['outage_events'] = outage_events
return cell
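    # After vote_on_assets() each asset carries either outage_events=None or a
    # dict of two parallel lists, e.g. (illustrative values, times in seconds):
    #   {'event_durations': [4500.0], 'event_times': [1430452800.0]}
    # post_outage_on_asset() below walks these lists when posting outages.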
def post_outage_on_asset(self, cell, payload):
"""
:param cell:
:param payload: this will be of the form
http://10.123.0.27:8080/eon360/api/utilities?p=0&s=20
"eonUtilityEntries": [
{
"id": "5508dacee4b0df5309df591e",
"version": 0,
#######################
## ADD THIS GUID
"guid": "46f7655c-9160-4c08-b272-59c32232ba9f",
#######################
"company": "CEDRAFT",
"serviceAddress": "{\"CE Map ID\": \"None\",
\"Municipality\": \"New Castle\",
\"Provenance\":\"Report A\",
\"Attached Assets\": [],
\"Next Hop\": \"PS302355612\",
\"Type\": \"HOUSE\",
\"Downstream\": \"None\",
\"Transformer Supply\": [\"TR302355616_T4\"],
\"Upstream\":\"PS302355612\",
\"Connections\": [],
\"Address\":\"10 VALLEY VIEW RD, Chappaqua NY, 10514-2532\",
\"Utility ID\": \"None\"}",
"errorCode": "0",
"circuitID": "10U2",
"transformerID": "HS01c902165608e5f12ce4c01c78c70415",
"eligibilityList": [
{
"id": "54a079aae4b040db636a2d95",
"version": 0,
"guid": "23697667-4810-4169-8802-46ad6efae3a3",
"company": "",
"ontSerialNumber": "59054969",
"errorCode": "0.91",
"alarmID": "CHPQNYCPOL1*LET-3*11*1*1",
"ontAddress": "8 Brookside Cir,Chappaqua,NY,10514",
"modelCoefficients": null,
"longitude": f-73.787811,
"latitude": 41.175064,
"createdAtTimestamp": 1419803050366,
"lastModifiedAtTimestamp": 1419803050366
},
"payload": {
"company": "CEDRAFT",
"outageTime": 1430452800000,
"longitude": lon,
"latitude": lat,
"circuitID": "",
"assetID": "",
"votes": 3,
"spatial": '{"r":[1,1]}',
"temporal": "[1,0; .8,24; .3, 60]",
"reputationEnabled": True,
"zoomT": 1,
"zoomR": 1,
"radius": 0.12,
"units": "MI"
},
The post must be of the form
{
"eventDuration": "long",
"guid": "",
"id": "",
"utility": {
"assetType": "",
"circuitID": "",
"company": "",
"outageID": "",
"transformerID": ""
},
"timeOfEvent": "Date",
"company": "",
"longitude": 0,
"internalUtilityGuid": "",
"latitude": 0,
"algorithm": "",
"version": "long"
}
:return:
"""
# circuit_id = ""
# First loop over all circuits:
try:
for this_circuit in cell['circuits']:
# Now loop over all the items on that circuit
for this_asset in cell['circuits'][this_circuit]['connected_items']:
asset_item = cell['assets'][this_asset]
outages = asset_item['outage_events']
# This is the form of an event (If there is one!)
# It will be None if there are no events otherwise it will be:
# 'event_durations': copy.deepcopy(these_events['event_durations']),
# 'event_times': copy.deepcopy(these_events['event_times'])
if outages:
                        self.my_local_logger.info('Examining circuit=%s, asset=%s, which has %d outages to post!' % (this_circuit, this_asset, len(outages['event_durations'])))
if this_asset[0:2] == "TR":
asset_type = "TRANSFORMER"
elif this_asset[0:2] == "HS":
asset_type = "HOUSE"
elif this_asset[0:2] == "PS":
asset_type = "POLE, SECONDARY"
elif this_asset[0:2] == "PP":
asset_type = "POLE, PRIMARY"
else:
asset_type = "OTHER"
for i, this_event_duration in enumerate(outages['event_durations']):
address_string = cell['assets'][this_asset]['serviceAddress']
self.my_local_logger.info("address_string = %s" % address_string)
address_string_pairs = json.loads(address_string)
this_address = ''
if "Municipality" in address_string_pairs.keys():
this_address += 'Municipality:' + address_string_pairs['Municipality'] + '|'
if "Address" in address_string_pairs.keys():
this_address += 'Address:' + address_string_pairs['Address'] + '|'
# Here's how to include the CE Map ID and the Utility ID if needed
# this_address += 'CE MapID:' + this_asset.split('_')[1] + '|'
# this_address += 'UtilityID:' + this_asset.split('_')[0][2:]
if this_address[-1] == '|':
this_address = this_address[:-1]
utility_document = {
"internalUtilityGuid": asset_item['guid'],
"eventDuration": int(round(this_event_duration * 1000)),
# "guid": "guid-here",
# "id": 'id-here',
"utility": {
"assetType": asset_type,
"circuitID": this_circuit,
"company": payload["company"],
"outageID": 'outage-id-here',
"transformerID": this_asset,
"address": this_address
},
"timeOfEvent": int(round(outages['event_times'][i] * 1000)),
# "longitude": asset_item['lat_lon'][1],
# "latitude": asset_item['lat_lon'][0],
"algorithm": "NEAR10"
# "version": 0
}
if not g_config.IS_DEPLOYED:
print "Posting a %10.2f minute outage on %s, circuit: %s, asset_id: %s" % (
(utility_document['eventDuration'] / 1000 / 60),
arrow.get(utility_document['timeOfEvent'] / 1000).format("MMMM DD, YYYY @ hh:mm A"),
utility_document['utility']['circuitID'],
utility_document['utility']['transformerID']
)
self.my_local_logger.info('Posting: %s' % json.dumps(utility_document))
self.eon_api_bridge.groomed_outages_post_20(utility_document)
else:
if not g_config.IS_DEPLOYED:
print "Nothing to post for circuit: %s, asset_id: %s" % (
this_circuit,
this_asset
)
        except Exception as e:
            self.my_local_logger.error('Posting outage error: %s' % e)
def build_in_memory_cell_db(self, cell):
"""
:param cell: A cell of data that represents the collection of onts, assets and circuits along with the alarms
Creates an in-memory data structure that has this information:
this_cell = {'neighbors': [], # the 6 nearest neighbors
'assets': {}, # The utility assets including their lat and lon and events
'onts': {}, # Verizon's ONTs including their lat and lon and events
'state': '' # A string representing the state of this cell.
This is used for multi-threading purposes so that neighboring cells can see
what's going on.
'circuits': {} # This is a set of circuits in this cell. All assets on a circuit
are in the circuits list
'lat_lon': [] # The lat and lon array of the center of the cell
'radius': 1.00 # The radius of the circumscribed cell.
ont_items is a dictionary of {'lat_lon':[],'assets':[],'events':[]}
asset_items is a dictionary of {'lat_lon':[],'onts':[],'events':[]}
:return: none
"""
asset_dict = {'groom_time': cell['groom_time']}
for this_asset in cell['assets']:
asset_dict[this_asset] = SortedDict()
for this_ont in cell['assets'][this_asset]['onts']:
this_distance = lat_lon_distance(cell['assets'][this_asset]['lat_lon'][0],
cell['assets'][this_asset]['lat_lon'][1],
cell['onts'][this_ont]['lat_lon'][0],
cell['onts'][this_ont]['lat_lon'][1])
for this_event in cell['onts'][this_ont]['events'][0]:
event_key = int(this_event / 1000)
if event_key in asset_dict[this_asset]:
asset_dict[this_asset][event_key]['voters'].update({this_distance: this_ont})
else:
voters = SortedDict()
voters.update({this_distance: this_ont})
asset_dict[this_asset].update({event_key: {'state': 0, 'voters': voters}})
# self.my_local_logger.debug("%d,0,%s,%s,%f" % (event_key, this_ont, this_asset, this_distance)
for this_event in cell['onts'][this_ont]['events'][1]:
event_key = int(this_event / 1000)
if event_key in asset_dict[this_asset]:
asset_dict[this_asset][event_key]['voters'].update({this_distance: this_ont})
else:
voters = SortedDict()
voters.update({this_distance: this_ont})
asset_dict[this_asset].update({event_key: {'state': 1, 'voters': voters}})
# self.my_local_logger.debug("%d,1,%s,%s,%f" % (event_key, this_ont, this_asset, this_distance)
self.asset_dictionary = asset_dict
self.my_local_logger.debug("done with build_in_memory_cell_db")
@staticmethod
def compute_cell_guid(payload, resolution):
"""
Computes a GUID based on the lat lon and time value
"""
# query_guid = payload["query_guid"]
this_lat = payload["latitude"]
this_lon = payload["longitude"]
# utility = payload["company"]
outage_test_time = payload["outageTime"]
# circuit_id = payload["circuitID"]
# asset_id = payload["assetID"]
# votes = payload["votes"]
# spatial = payload["spatial"]
# temporal = payload["temporal"]
# reputation_ena = payload["reputationEnabled"]
# zoom_t = payload["zoomT"]
# zoom_r = payload["zoomR"]
# radius = payload["radius"]
# units = payload["units"]
# The number of decimal points in the lat and lon gridify the guid
fmt_str = "%%4.%df_%%4.%df_%%d" % (resolution, resolution)
this_guid = fmt_str % (this_lat, this_lon, outage_test_time)
cell_guid = this_guid.replace(".", "p").replace("-", "m")
timestamp_guid = "%d" % outage_test_time
return cell_guid, timestamp_guid
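# Worked example (illustrative, not in the original source): with resolution=2,
# payload latitude=41.2693778, longitude=-73.8773389 and outageTime=1420850553689,
# fmt_str becomes "%4.2f_%4.2f_%d", this_guid becomes "41.27_-73.88_1420850553689",
# so cell_guid is "41p27_m73p88_1420850553689" and timestamp_guid is "1420850553689".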
def save_cell_in_shared_mem(self, this_cell_guid, cell):
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('Waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
self.shared_data['cell_collection_set'].add(this_cell_guid)
self.shared_data['cell_collection_dict'][this_cell_guid] = cell
self.shared_data_lock.release()
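# Design note (a sketch, not part of the original source): this acquire-with-polling
# pattern recurs throughout the class and could be factored into a context manager
# (requires "import contextlib"; g_config.SLEEP_TIME as used above):
#
#   @contextlib.contextmanager
#   def polled_lock(lock, logger, message):
#       while not lock.acquire(False):
#           logger.info(message)
#           time.sleep(g_config.SLEEP_TIME)
#       try:
#           yield
#       finally:
#           lock.release()
#
#   # usage: with polled_lock(self.shared_data_lock, self.my_local_logger, 'Waiting...'):
#   #            ... critical section ...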
def get_shared_data(self, query_type="all", dict_key=None):
my_shared_data = None
if query_type == "all":
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('groom_outages: waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
my_shared_data = copy.copy(self.shared_data)
self.shared_data_lock.release()
elif query_type == "cell_collection_dict":
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('groom_outages: waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
if dict_key is not None:
my_shared_data = copy.copy(self.shared_data['cell_collection_dict'][dict_key])
else:
my_shared_data = copy.copy(self.shared_data['cell_collection_dict'])
self.shared_data_lock.release()
elif query_type == "cell_collection_dict_keys":
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('groom_outages: waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
my_shared_data = copy.copy(self.shared_data['cell_collection_dict'].keys())
self.shared_data_lock.release()
elif query_type == "cell_collection_set":
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('groom_outages: waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
my_shared_data = copy.copy(self.shared_data['cell_collection_set'])
self.shared_data_lock.release()
return my_shared_data
def build_new_cell(self, this_cell_guid, this_items_payload, ttl):
"""
Builds a cell and stores it in local shared memory
"""
self.my_local_logger.debug("BUILDING_CELL %d, %s" % (self.cell_count, this_cell_guid))
t0 = time.time()
# Step 3) Query the API and find all utility assets within the region of interest
cell = self.get_data_in_cell_area(this_items_payload, ttl) # lat, lon, radius, this_time, ttl)
t1 = time.time()
self.my_local_logger.debug("API calls to get %d assets in a %f %s radius took %f seconds" %
(len(cell['assets']), cell['radius'], g_config.RADIUS_UNITS, (t1 - t0)))
self.persist_cell_pickle(cell, this_cell_guid)
self.my_local_logger.debug("Saved the cell pickle")
t0 = time.time()
self.build_in_memory_cell_db(cell)
t1 = time.time()
self.my_local_logger.debug("Building in memory data took %f seconds" % (t1 - t0))
# plot_assets(self.asset_dictionary)
# Step 4) Save this cell to the shared memory set
self.cell_count += 1
return cell
def mark_cell_in_shared_memory(self, cell_guid):
self.my_local_logger.debug("MARKING_CELL %s" % cell_guid)
while not self.shared_data_lock.acquire(False):
self.my_local_logger.info('Waiting to acquire lock for shared data.')
time.sleep(g_config.SLEEP_TIME)
self.shared_data['cell_collection_set'].add(cell_guid)
self.shared_data_lock.release()
def queue_to_publish(self, message):
while not self.outgoing_queue_lock.acquire(False):
self.my_local_logger.info('Groomer is waiting to acquire lock on publisher queue.')
time.sleep(g_config.SLEEP_TIME)
self.my_local_logger.debug("Groomer got consumer_queue_lock, ")
self.outgoing_q.put(message, False)
self.my_local_logger.debug(" after putting message in queue size is now: %d" % self.outgoing_q.qsize())
if self.outgoing_queue_lock:
self.outgoing_queue_lock.release()
self.my_local_logger.debug(
"Groomer released the consumer_queue_lock. Queue size is now:%d" % self.outgoing_q.qsize())
self.my_local_logger.info('Publish message queued, lock released.')
def groom_outages(self):
"""
This method grooms the outages by looking at the internal shared queue and pulling off the items that are
ready to be processed. The shared queue, which is passed between processes, contains the cell data along
with the processing state for each cell.
"""
#######################################################
# This is the general flow for the groom process
# When the queue is hit then it will have the start and end times along with the various parameters
# needed for the outage event calculation.
# When the queue item comes in then these steps happen.
#
# h) temporal filter : a string that represents time domain filter coefficients.
# The string will be of this form:
# "[1,0; .8,24; .3, 60]"
# "[w0,t0; w1,t1; w2, t2; ...]" were w0 is the weight (typically between 0 and 1)
# and t0 is the historical time
# (in minutes) from the event. In this example the following rules are used:
# At the event time, the alarm will be weighted with 1, 24 minutes before the event the alarm
# will be weighted by .8, 60 minutes before the event the alarm will be weighted by 0.3.
# For events that happen between the time weights a linear interpolation will be used.
# i) use reputation (flag) : a flag that says whether to use the reputation of the ONTs for voting
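# A minimal sketch (an assumption, not the project's actual implementation) of how a
# "[w0,t0; w1,t1; ...]" temporal filter string could be parsed and evaluated with the
# linear interpolation described above:
#
#   def parse_temporal_filter(spec):
#       # "[1,0; .8,24; .3, 60]" -> [(0.0, 1.0), (24.0, 0.8), (60.0, 0.3)] as (time, weight)
#       pairs = [p.split(',') for p in spec.strip('[]').split(';')]
#       return sorted((float(t), float(w)) for w, t in pairs)
#
#   def temporal_weight(pairs, minutes_before_event):
#       for (t0, w0), (t1, w1) in zip(pairs, pairs[1:]):
#           if t0 <= minutes_before_event <= t1:
#               return w0 + (w1 - w0) * (minutes_before_event - t0) / (t1 - t0)
#       return 0.0  # outside the filter window
#
#   # temporal_weight(parse_temporal_filter("[1,0; .8,24; .3, 60]"), 12) -> 0.9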
self.start_time = time.time()
self.my_local_logger.debug("GROOMING NOW")
# lat = 41.2693778
# lon = -73.8773389
# radius = 1.0 # config.START_RADIUS # = 0.12
# #################################################
# STEP 1 Pull items off the queue.
# self.pull_q_groom_command()
self.groomer_state = "1:GROOM"
groom_queue_len = len(self.local_q)
if groom_queue_len == 0:
self.my_local_logger.debug("NOTHING IN LOCAL QUEUE, returning")
self.groomer_state = "1.0:GROOM_RETURN_EARLY"
return
self.my_local_logger.debug("------------------ processing all %d items in the local_q" % groom_queue_len)
for _ in range(groom_queue_len):
# STEP 1) Pull items off the queue. The queue will consist of:
# a) time : in milliseconds, the time desired for calculating the outage
# b) lat : latitude in decimal degrees
# c) lon : longitude in decimal degrees
# d) circuitID : circuit ID filter to be used for identification of a
# specific circuit within the area of interest
# e) assetID : asset ID filter to be used within the area of interest
# f) number of votes : number of votes to be used for qualifying the outage
# g) spatial filter : a string that represents radial filter coefficients. This is a string of the form:
# "[1,0; .2,.2; .3,.01]"
# "[w0,d0; w1,d1; w3,d3; ... ]" where w0 is the weight (typically 0 to 1)
# and d0 is the distance in miles or
# whatever the units are set to in the config file.
# The distance is the distance along a line that runs through the asset lat/lon and is parallel to the
# nearest upstream circuit segment. The ONT distance is projected to this circuit line and is filtered
# by the same spatial filter coefficients.
# In addition to the spatial filter the ONTs are weighted by their reputation
# (if the flag is set) which is
# calculated by an internally learned algorithm.
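# Illustrative sketch only (assumed; mirrors the temporal example earlier): the same
# piecewise-linear interpolation can weight an ONT by its projected distance:
#
#   def spatial_weight(pairs, distance):  # pairs: (distance, weight) tuples sorted by distance
#       if distance <= pairs[0][0]:
#           return pairs[0][1]
#       for (d0, w0), (d1, w1) in zip(pairs, pairs[1:]):
#           if d0 <= distance <= d1:
#               return w0 + (w1 - w0) * (distance - d0) / (d1 - d0)
#       return 0.0  # beyond the last breakpoint
#
# with the reputation multiplier applied afterwards when the reputation flag is set.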
self.my_local_logger.debug(" Grooming local_q, size = %d" % len(self.local_q))
top_of_q_data = copy.copy(self.local_q.popleft()) # was popleft
self.groomer_state = "1.1:GROOM_POP_QUEUE"
self.my_local_logger.info("Got a local queue item.")
if "ttl" in top_of_q_data.keys():
ttl = top_of_q_data["ttl"]
else:
ttl = self.ttl
if top_of_q_data["payload"]['radius'] != self.working_radius:
self.resolution = compute_resolution(top_of_q_data["payload"]["radius"])
this_cell_guid, this_timestamp_guid = self.compute_cell_guid(top_of_q_data["payload"], self.resolution)
keys = self.get_shared_data('cell_collection_dict_keys')
collection_set = self.get_shared_data('cell_collection_set')
##################################################
# STEP 2) Look at the GUID generator for the lat and lon and see if the shared
# memory contains a cell structure for this item.
if this_cell_guid in keys: # my_shared_data['cell_collection_dict'].keys():
# 2.1) If it does contain the GUID then determine the state of that cell.
# 2.2) If the time stamp GUID of this cell GUID is within the resolution outage
# machine then continue with step 4.
self.groomer_state = "1.2:GROOM_FOUND_SHARED_DATA"
self.my_local_logger.debug("This cell is already in shared memory, "
"and is fully populated checking using a copy of it")
cell = self.get_shared_data('cell_collection_dict', this_cell_guid)
self.my_local_logger.debug("EXISTS: %s[%f,%f]TTL=%d" %
(this_cell_guid, cell["lat_lon"][0], cell["lat_lon"][1], cell["ttl"]))
else: # 2.3) If it does not contain the GUID or the time stamp GUID does not match then go to step 3.
# STEP 3) Query the API and find all utility assets within the region of interest
# (defined by a config parameter as the starting zoom level in miles)
# These will include house, transformers, poles, wires and so on.
# The first 2 letters of the assetID will be the item type. Save this cell to the shared memory set
# From this collection of assets create a SET of items in a shared queue that
# holds these items so that other processes don't work on them at the same time.
# The items will be filtered by assetID (item 1e) and circuitID (item 1d) if these fields are filled in.
cell = self.build_new_cell(this_cell_guid, top_of_q_data["payload"], ttl)
self.save_cell_in_shared_mem(this_cell_guid, cell)
self.my_local_logger.debug("CREATE: %s[%f,%f]TTL=%d" %
(this_cell_guid, cell["lat_lon"][0], cell["lat_lon"][1], ttl))
self.groomer_state = "1.3:GROOM_BUILD_NEW_CELLS"
# self.plot_assets()
# At this point the cell has been created and tested to be sure that it's the one we want.
# Now examine the neighboring cells from this cells collection:
# STEP 4) Using the result of step 3 the cell is ready to be processed.
# 4.1) The next step is to look at each of the 6 neighboring cells.
# This is done by examining the 6 cells and determining their state.
# 4.1.1) Check the TTL count of this cell. If the TTL is zero continue to the next cell
# in the incoming Queue.
self.groomer_state = "1.4:GROOM_PROPAGATE_CELL"
if cell['ttl'] != 0:
for i, items in enumerate(cell['neighbors']): # the 6 nearest neighbors
this_neighbor_cell = items[0]
angle = items[1]
# The six neighbor cells are initially set to zero
# this_cell = {'neighbors': [["",0], ["",60], ["",120], ["",180], ["",240],["",300]],
# 'assets': {},
# 'onts': {},
# 'circuits': {},
# 'state': 'create',
# 'lat_lon': [lat, lon],
# 'radius': radius,
# 'groom_time': groom_time
# }
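# Illustrative note (an assumption; move_to_lat_lon's real implementation lives elsewhere):
# stepping 2*radius toward bearings 0, 60, ..., 300 degrees tiles the plane with a hexagonal
# neighborhood. A typical small-step approximation, with EARTH_RADIUS in the same units as
# distance (g_config.RADIUS_UNITS), would be:
#
#   def move_to_lat_lon(lat, lon, distance, bearing_deg):
#       dlat = (distance / EARTH_RADIUS) * math.cos(math.radians(bearing_deg))
#       dlon = (distance / EARTH_RADIUS) * math.sin(math.radians(bearing_deg)) / math.cos(math.radians(lat))
#       return lat + math.degrees(dlat), lon + math.degrees(dlon)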
distance = 2 * top_of_q_data["payload"]["radius"]
if not this_neighbor_cell:
# We need to copy each of the neighbor cells to make sure we get a unique data structure
neighbor_cell_message = copy.copy(top_of_q_data)
self.my_local_logger.debug("%s neighbor[%d] is empty, [%f][%f], filling it now" %
(this_cell_guid, i, cell["lat_lon"][0], cell["lat_lon"][1]))
new_lat, new_lon = move_to_lat_lon(cell["lat_lon"][0], cell["lat_lon"][1], distance, angle)
# jump out of the loop if the cell is outside the region of interest
company_name = top_of_q_data['payload']['company']
if company_name not in self.utility_region.keys():
self.my_local_logger.error("Skipping cell rebroadcast "
"because company_name='%s' is not in utility_region." %
company_name)
self.groomer_state = "1.5.0:GROOM_ABORT_PROPAGATE"
continue
if (new_lat < self.utility_region[company_name]['min_latitude']) or \
(new_lat > self.utility_region[company_name]['max_latitude']) or \
(new_lon > self.utility_region[company_name]['max_longitude']) or \
(new_lon < self.utility_region[company_name]['min_longitude']):
# Here is where the outage time can be advanced by 2 weeks and run again.
if not g_config.IS_DEPLOYED:
print "Skipping neighbor cell rebroadcast at " \
"lat = %f, lon = %f because outside utility region." % \
(new_lat, new_lon)
self.my_local_logger.info("Skipping neighbor cell rebroadcast at "
"lat = %f, lon = %f because outside utility region." %
(new_lat, new_lon))
self.groomer_state = "1.5.1:GROOM_ABORT_PROPAGATE"
continue
neighbor_cell_message["payload"]["longitude"] = new_lon
neighbor_cell_message["payload"]["latitude"] = new_lat
new_cell_guid, new_timestamp_guid = self.compute_cell_guid(neighbor_cell_message["payload"],
self.resolution)
if new_cell_guid not in collection_set:
# STEP 5) Queue up a grooming process for neighboring cells that
# allows another process to pick up the outage calculation for the rest of the circuit.
# The neighboring cell is defined by outage location +/- 1 one patch area of
# interest in 6 hexagonal directions. This will create a small overlap on the cell corners.
self.groomer_state = "1.5.1:GROOM_QUEUE_NEIGHBOR"
self.my_local_logger.debug("queue length X = %d" % len(self.local_q))
self.mark_cell_in_shared_memory(new_cell_guid)
if cell['ttl'] == -1:
# If the TTL count is -1 then this is a full propagation list so this causes a
# post (publish) of a new query. Then continue with the next cell.
neighbor_cell_message["ttl"] = -1
else:
# Decrease the TTL count and post (publish) a new query.
# Then continue with the next cell.
neighbor_cell_message["ttl"] = cell['ttl'] - 1
self.my_local_logger.debug(" POST: %s[%f,%f]TTL=%d->%s[%f,%f]TTL=%d(%d)" %
(this_cell_guid, cell["lat_lon"][0], cell["lat_lon"][1], ttl,
new_cell_guid, new_lat, new_lon, neighbor_cell_message["ttl"],
angle))
########################
# This is the work around to just post the message back to the local_q instead of sending it
# out to the rabbit bus for parallel processing
####################################
# BURNED BY PYTHON
####################################
# The deque append does not copy the data; it only stores a reference to it, so the
# message must be deep-copied before it is queued:
# self.local_q.append(copy.deepcopy(neighbor_cell_message))
# self.my_local_logger.debug("queue length Y = %d" % len(self.local_q))
self.queue_to_publish(copy.deepcopy(neighbor_cell_message))
else:
self.groomer_state = "1.5.2:GROOM_LINK_NEIGHBOR"
# time.sleep(1)
self.my_local_logger.debug("Stitching %s's neighbor[%d]@[%f][%f] to this cell: %s" %
(this_cell_guid, i, cell["lat_lon"][0], cell["lat_lon"][1],
new_cell_guid))
self.my_local_logger.debug("SHARED: %s[%f,%f]TTL=%d->%s[%f,%f]TTL=%d (%d)" %
(this_cell_guid, cell["lat_lon"][0], cell["lat_lon"][1], ttl,
new_cell_guid, new_lat, new_lon, cell['ttl'], angle))
# If the cell is already in shared memory then just connect the cells neighbors
cell['neighbors'][i] = [new_cell_guid, angle]
self.save_cell_in_shared_mem(this_cell_guid, cell)
# STEP 6) OUTAGE CALCULATION
# at this point the outage region is contained within one cell.
# This is the process of grooming the outage. The data is ready to be used for calculating the outage.
# The filter algorithm was given above.
# 6.1) First the temporal filter is applied to the assets in the cell
self.groomer_state = "1.6:GROOM_COMPUTE_OUTAGE"
t_cell = self.temporal_filter(cell)
self.save_cell_in_shared_mem(this_cell_guid, t_cell)
# 6.2) Second the spatial filter is applied to each assets in the cell
s_cell = self.spatial_filter(t_cell)
self.save_cell_in_shared_mem(this_cell_guid, s_cell)
# 6.3) Once the filtered data is ready then the vote is applied to each ONT and the final vote is computed.
v_cell = self.vote_on_assets(s_cell,
top_of_q_data['payload']['temporal'],
top_of_q_data['payload']['spatial'],
top_of_q_data['payload']['votes'])
self.save_cell_in_shared_mem(this_cell_guid, v_cell)
# and the results is written back to the outage API.
self.my_local_logger.info("Calling post_outage_on_asset.")
self.my_local_logger.info("Posting this payload: %s" % json.dumps(top_of_q_data["payload"]))
self.post_outage_on_asset(v_cell, top_of_q_data["payload"])
self.end_time = time.time()
elapsed_process_time = fmax(self.end_time - self.start_time, .001)
self.groomer_state = "0:IDLE"
self.groom_run_state = "0:IDLE"
self.my_local_logger.info("Done. Elapsed time %f sec." % elapsed_process_time)
@staticmethod
def build_groom_payload(this_date, company=None, trigger_time=0, lat=0, lon=0, ttl=0):
"""
:param this_date: The date that the groom operation is to be examined
:param company: The utility company name associated with this alarm (if any)
:param trigger_time: The time of the alarm.
:param lat: Latitude of the alarm
:param lon: Longitude of the alarm
:param ttl: Time to live (set to 2 to limit the range of area to examine)
:return: The payload for the groom queue (or None if there is no utility)
Note that the first matching company is returned; there may be cases where utility regions overlap.
There are better test methods for determining whether a point is in a region or not.
"""
this_payload = None
if company is None:
for this_company in g_config.UTILITY_REGION:
#:TODO: Replace with a better test method. See http://alienryderflex.com/polygon/
if g_config.UTILITY_REGION[this_company]['min_latitude'] < lat < \
g_config.UTILITY_REGION[this_company]['max_latitude'] and \
g_config.UTILITY_REGION[this_company]['min_longitude'] < lon < \
g_config.UTILITY_REGION[this_company]['max_longitude']:
company = this_company
break
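# A sketch of the better point-in-polygon test the TODO above refers to (ray casting,
# per http://alienryderflex.com/polygon/); illustrative only, since the regions used
# here are plain bounding boxes:
#
#   def point_in_polygon(lat, lon, vertices):  # vertices: [(lat0, lon0), (lat1, lon1), ...]
#       inside = False
#       j = len(vertices) - 1
#       for i in range(len(vertices)):
#           yi, xi = vertices[i]
#           yj, xj = vertices[j]
#           if ((yi > lat) != (yj > lat)) and \
#              (lon < (xj - xi) * (lat - yi) / (yj - yi) + xi):
#               inside = not inside
#           j = i
#       return inside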
if company is not None:
this_payload = {"dateTime": this_date,
"payload": {"company": company,
"outageTime": trigger_time,
"longitude": lon,
"latitude": lat,
"circuitID": "",
"assetID": "",
"votes": 0,
"spatial": '{"r":[1,1]}',
"temporal": "[1,0; .8,24; .3, 60]",
"reputationEnabled": True,
"zoomT": 1,
"zoomR": 1,
"radius": 0.12,
"units": "MI"
},
"messageType": "Save",
"ttl": ttl
}
return this_payload
@staticmethod
def build_payload(this_date, this_company, this_trigger_time, this_lat, this_lon, ttl):
this_payload = {"dateTime": this_date,
"payload": {"company": this_company,
"outageTime": this_trigger_time,
"longitude": this_lon,
"latitude": this_lat,
"circuitID": "",
"assetID": "",
"votes": 0,
"spatial": '{"r":[1,1]}',
"temporal": "[1,0; .8,24; .3, 60]",
"reputationEnabled": True,
"zoomT": 1,
"zoomR": 1,
"radius": 0.12,
"units": "MI"
},
"messageType": "Save",
"ttl": ttl
}
return this_payload
def utility_groom(self, utility_name="ALL", location=None, ttl=g_config.TTL_MAX):
"""
Triggers a Utility wide grooming process by setting up a ttl of -1 and injecting it into the Rabbit MQ bus.
When called, the outage test location is calculated by starting in the center of the geographic location
using the current time for outage detection.
All utilities in the utility dictionary are groomed when utility_name is "ALL".
:param: utility_name: the utility to groom, or ALL for all
:param: location: the groom location used as the starting point of the groom process. If a value is
passed in and is not None, the groom occurs within a TTL_MAX region of this location
:return:
"""
# TODO: The best approach here is to trigger the outage groom at the center of the last alarm.
# trigger_time = arrow.get("2015-01-09T19:42:33.689-0400").timestamp*1000
trigger_date = arrow.utcnow().to('US/Eastern').format('YYYY-MM-DDTHH:mm:ss.SSSZ')
trigger_time = arrow.get(trigger_date).timestamp*1000
if location is None:
ttl = g_config.TTL_RANDOM_GROOM
if utility_name in self.utility_region.keys():
r = random.random()
this_lat = r * (self.utility_region[utility_name]['max_latitude'] -
self.utility_region[utility_name]['min_latitude']) + \
self.utility_region[utility_name]['min_latitude']
r = random.random()
this_lon = r * (self.utility_region[utility_name]['max_longitude'] -
self.utility_region[utility_name]['min_longitude']) + \
self.utility_region[utility_name]['min_longitude']
this_payload = self.build_groom_payload(trigger_date, utility_name, trigger_time, this_lat, this_lon, ttl)
self.my_local_logger.info("SEEDED %s" % this_payload)
if this_payload is not None:
self.queue_to_publish(this_payload)
else:
for company in self.utility_region.keys():
r = random.random()
this_lat = r * (self.utility_region[company]['max_latitude'] -
self.utility_region[company]['min_latitude']) + \
self.utility_region[company]['min_latitude']
r = random.random()
this_lon = r * (self.utility_region[company]['max_longitude'] -
self.utility_region[company]['min_longitude']) + \
self.utility_region[company]['min_longitude']
this_payload = self.build_groom_payload(trigger_date, company, trigger_time, this_lat, this_lon, ttl)
self.my_local_logger.info("SEEDED %s" % this_payload)
if this_payload is not None:
self.queue_to_publish(this_payload)
else:
if utility_name in self.utility_region.keys():
this_lat = location["lat"]
this_lon = location["lon"]
this_payload = self.build_groom_payload(trigger_date, utility_name, trigger_time, this_lat, this_lon,
ttl)
self.my_local_logger.info("SEEDED %s" % this_payload)
if this_payload is not None:
self.queue_to_publish(this_payload)
else:
for company in self.utility_region.keys():
this_lat = location["lat"]
this_lon = location["lon"]
this_payload = self.build_groom_payload(trigger_date, company, trigger_time, this_lat, this_lon,
ttl)
self.my_local_logger.info("SEEDED %s" % this_payload)
if this_payload is not None:
self.queue_to_publish(this_payload)
def run(self):
# self.my_local_logger.push
self.run_start_time = time.time()
report_time = self.run_start_time + g_config.KEEP_ALIVE_INTERVAL
self.my_local_logger.debug("Started at %f" % self.run_start_time) # "backend_msg_handler.run")
while self.run_enable:
# Also add a timeout so that if the queue isn't full it processes alarms anyway.
elapsed_time = time.time() - self.run_start_time
if time.time() > report_time:
self.my_local_logger.info("|OK dT|%10.3f|(s)|%10.3f|e|%10.3f|elp|%10.3f|state|%s|groomer state|%s" %
(self.end_time - self.start_time,
self.start_time,
self.end_time,
elapsed_time,
self.groom_run_state,
self.groomer_state)
)
report_time = time.time() + g_config.KEEP_ALIVE_INTERVAL
self.idle_count += 1
self.groom_run_state = "1:REPORT"
queue_depth = len(self.local_q)
groom_now = False
if queue_depth > g_config.QUEUE_SIZE_BLOCK:
groom_now = True
self.my_local_logger.info("Analyzing after %f sec because queue size is %d" %
(elapsed_time, queue_depth)) # , "backend_msg_handler.run")
elif queue_depth > 0 and (elapsed_time > g_config.MESSAGE_EXPIRATION_SEC):
groom_now = True
self.my_local_logger.info("Analyzing after %f sec because time expired." %
elapsed_time) # , "backend_msg_handler.run")
# when the backend message queue is QUEUE_SIZE_BLOCK then block this thread and process the queue
if groom_now:
self.groom_run_state = "2:GROOMING"
self.groom_outages()
# need to acquire a lock when pulling from the queue
if not self.incoming_rabbit_mq.empty():
self.idle_count = 0
self.my_local_logger.debug("Message received, calling the process_incoming_queue now: %f" %
elapsed_time)
self.groom_run_state = "3:PROCESS_QUEUE"
self.process_incoming_rabbit_mq()
# set the run_start_time to begin timing at the time that the last message was queued
self.run_start_time = time.time()
def join(self, timeout=None):
self.run_enable = False
self.my_local_logger.info("Stopping at %f" % (time.time()))
if __name__ == "__main__":
from g_pika_rabbit_bridge import MqConsumer, MqPublisher
import logging.handlers
import datetime
BASE_DIR = 'C:\\repo\\personal\\myDocs\\Aptect\\Verizon\\Workproduct\\EON-IOT\\groomer'
LOG_FORMAT = '%(asctime)s %(name)-12s %(levelname)-8s %(funcName)s %(lineno)5d :%(message)s'
########################
# LOG FILE SETUP
########################
unique_str = datetime.datetime.now().isoformat().replace(':', '_').replace('.', '_').replace('-', '_')
try:
os.mkdir(BASE_DIR + os.sep + g_config.LOG_DIR)
except (OSError, WindowsError):  # "except OSError or WindowsError" would only ever catch OSError
    print "Log directory exists"
try:
os.mkdir(BASE_DIR + os.sep + g_config.PICKLES)
except (OSError, WindowsError):
    print "Pickles directory exists"
LOG_FILENAME = BASE_DIR + os.sep + g_config.LOG_DIR + os.sep + 'top_' + unique_str + '.log'
# Add the log message handler to the logger
handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=5000000, backupCount=50)
handler.setFormatter(logging.Formatter(LOG_FORMAT, datefmt='%m-%d %H:%M:%S'))
handler.setLevel(logging.DEBUG)
# Add this handler to the root logger
logging.getLogger('').addHandler(handler)
shared_data_top = {'thread_name_pool': set(), # This contains a set of thread names that are sharing this data
'master_ont_set': set(), # This contains all the ONTs that have been seen for this groom cycle
'master_ont_dict': {}, # This contains the dictionary of ONTs that have been seen
"cell_collection_set": set(), # This contains all the cell guids that have been seen so far
"cell_collection_dict": {}} # This is a dictionary of the cell quids that have been seen
# and have been filled in with cell data
shared_data_lock_top = threading.Lock()
rabbit_message_queue = Queue.Queue()
rabbit_queue_lock = threading.Lock()
# EON_MQ_IP = '10.123.0.20'
EON_MQ_IP = 'localhost'
EON_MQ_UN = 'manager' # 'manager' #
EON_MQ_PW = 'e0n36o' # 'manager' #
EON_MQ_PORT = 5672
EON_MQ_BASE = '/#/queues'
EON_MQ_VHOST = 'eon360'
EON_MQ_QUEUE = 'collection-notification'
EON_GROOM_QUEUE = 'grooming-notification'
connection_string = 'amqp://' + EON_MQ_UN + ':' + EON_MQ_PW + '@' + EON_MQ_IP + ':' + \
('%d' % EON_MQ_PORT) + '/' + EON_MQ_VHOST
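# With the settings above, this resolves to (for illustration):
# amqp://manager:e0n36o@localhost:5672/eon360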
consumer = MqConsumer(connection_string, rabbit_message_queue, rabbit_queue_lock, EON_GROOM_QUEUE)
# # Can probably use the next line to look for a failed pika bridge.
# It will be None if the connection is not available.
# consumer.__dict__['_connection']
publish_message_queue = Queue.Queue()
publish_queue_lock = threading.Lock()
publisher = MqPublisher(connection_string, publish_message_queue, publish_queue_lock, EON_GROOM_QUEUE)
groomer = GroomingMessageHandler(incoming_q=rabbit_message_queue,
incoming_queue_lock=rabbit_queue_lock,
outgoing_q=publish_message_queue,
outgoing_queue_lock=publish_queue_lock,
module_instance_name='Handler01',
shared_data=shared_data_top,
shared_data_lock=shared_data_lock_top)
groomer.run_enable = True
groomer.start()
consumer.start()
publisher.start()
run_mode = True
try:
# This is Corlandt NY
# This is what a groom payload should look like:
# The spec version 1.1 shows this format
# {
# “queryGuid": "dffdd6e5-79df-4da7-9a6d-84a8d3ead772", A unique ID that is created
# when the query button is clicked.
# “type”: "Query", Message type that is to be processed
# Type of Action can be one of:
# Save: Save button clicked on the GUI
# Test: Query button clicked when the mode selection is Test
# Query: Query button clicked when the mode selection is Query (default)
# Clear: User browses away from page
#
# "payload": { The payload of the data from the web page form
# "company": "CEDRAFT", The company name being used on this web page
# "outageTime": 1414011303715, The datetime from the web page form
# "latitude": 41.07597, Latitude (optional)
# "longitude": -74.011081, Longitude (optional)
# "circuitID",: "", Circuit ID (optional), as known by the utility
# "assetID": "", Asset ID (optional), as known by the utility (transformer)
# "votes": 3, Votes (optional) to use for outage 1 to 10
# "spatial": "[1,0; .2,.2; .3,.01]", A spatial vector string (optional) consisting of weight,
# distance pairs
# "temporal":"[1,0; .8,24; .3, 60]", A temporal vector string (optional) consisting of weight,
# time pairs
# "reputationEnabled": true, The state of the reputation check box. If checked then
# this value is true otherwise false
# "zoomT": 1, The current zoom level of the time in the display plot
# "zoomR": 1, The current zoom level of the radius in the display plot
# "radius": 1 The radius to use for the starting zoom level
# "units": "MI" The units of the radius. (MI or KM)
# }
# }
# This will be the outage time of the test (January 9th, 2015)
# The types of messages implemented are Query, Save
lat = 41.2693778
lon = -73.8773389
radius = 1.0 # config.START_RADIUS # = 0.12
outage_time = arrow.get("2015-01-09T19:42:33.689-0400").timestamp*1000
today = arrow.utcnow().to('US/Eastern').format('YYYY-MM-DDTHH:mm:ss.SSSZ')
groom_payload = {"queryGuid": "4a1b34bc-9739-4b40-85e1-8f464fe98211",
"dateTime": today,
"payload": {
"company": "CEDRAFT",
"outageTime": outage_time,
"longitude": lon,
"latitude": lat,
"circuitID": "",
"assetID": "",
"votes": 3,
"spatial": '{"r":[1,1]}',
"temporal": "[1,0; .8,24; .3, 60]",
"reputationEnabled": True,
"zoomT": 1,
"zoomR": 1,
"radius": 0.12,
"units": "MI"
},
"messageType": "Save"
}
publisher.message = groom_payload
while True:
    time.sleep(1.0)  # idle the main thread; a bare "pass" loop would spin at 100% CPU
groomer.join()
consumer.join()
publisher.join()
except KeyboardInterrupt:
groomer.join()
# consumer.join()
# Copyright Qwilt, 2012
#
# The code contained in this file may not be used by any other entities without explicit written permission from Qwilt.
#
# Author: naamas
from a.infra.misc.enum_with_value import EnumWithValue
from a.infra.basic.return_codes import ReturnCodes
from a.infra.misc.init_guard import InitGuard
from a.sys.confd.pyconfdlib.tag_values import TagValues
from a.sys.confd.pyconfdlib.value import Value
from a.sys.confd.pyconfdlib.key_path import KeyPath
from system_defaults_maapi_base_gen import SystemDefaultsMaapiBase
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.content.content_maapi_gen import BlinkyContentMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.connectivity_check.connectivity_check_maapi_gen import BlinkyConnectivityCheckMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.management.management_maapi_gen import BlinkyManagementMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.link.link_maapi_gen import BlinkyLinkMaapi
from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.device.device_maapi_gen import BlinkyDeviceMaapi
class BlinkySystemDefaultsMaapi(SystemDefaultsMaapiBase):
def __init__ (self, logger):
self.myInitGuard = InitGuard()
self._log=logger.createLogger("sys-blinky-oper-example","blinky-maapi-systemDefaults")
self.domain = None
self.contentObj = None
self.connectivityCheckObj = None
self.managementObj = None
self.linkObj = None
self.deviceObj = None
self.configurationDelayRequested = False
self.configurationDelay = None
self.configurationDelaySet = False
self.muteReportingRequested = False
self.muteReporting = None
self.muteReportingSet = False
self.sendGratuitousArpRequested = False
self.sendGratuitousArp = None
self.sendGratuitousArpSet = False
self.shutdownRequested = False
self.shutdown = None
self.shutdownSet = False
self.techModeRequested = False
self.techMode = None
self.techModeSet = False
def init (self, domain):
self.myInitGuard.crashIfInitDone()
for logFunc in self._log('init').debug3Func(): logFunc('called. domain=%s', domain)
self.domain = domain
self.myInitGuard.initDone()
def requestConfigAndOper (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-config-and-oper').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(True)
self.requestMuteReporting(True)
self.requestSendGratuitousArp(True)
self.requestShutdown(True)
self.requestTechMode(True)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestConfigAndOper()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestConfigAndOper()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestConfigAndOper()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestConfigAndOper()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestConfigAndOper()
def requestConfig (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-config').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(True)
self.requestMuteReporting(True)
self.requestSendGratuitousArp(True)
self.requestShutdown(True)
self.requestTechMode(True)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestConfig()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestConfig()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestConfig()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestConfig()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestConfig()
def requestOper (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-oper').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(False)
self.requestMuteReporting(False)
self.requestSendGratuitousArp(False)
self.requestShutdown(False)
self.requestTechMode(False)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.requestOper()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.requestOper()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.requestOper()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.requestOper()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.requestOper()
def clearAllRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-requested').debug3Func(): logFunc('called, PARAMS')
self.requestConfigurationDelay(False)
self.requestMuteReporting(False)
self.requestSendGratuitousArp(False)
self.requestShutdown(False)
self.requestTechMode(False)
if not self.contentObj:
self.contentObj = self.newContent()
self.contentObj.clearAllRequested()
if not self.connectivityCheckObj:
self.connectivityCheckObj = self.newConnectivityCheck()
self.connectivityCheckObj.clearAllRequested()
if not self.managementObj:
self.managementObj = self.newManagement()
self.managementObj.clearAllRequested()
if not self.linkObj:
self.linkObj = self.newLink()
self.linkObj.clearAllRequested()
if not self.deviceObj:
self.deviceObj = self.newDevice()
self.deviceObj.clearAllRequested()
def clearAllSet (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-set').debug3Func(): logFunc('called, PARAMS')
self.setConfigurationDelay(None)
self.configurationDelaySet = False
self.setMuteReporting(None)
self.muteReportingSet = False
self.setSendGratuitousArp(None)
self.sendGratuitousArpSet = False
self.setShutdown(None)
self.shutdownSet = False
self.setTechMode(None)
self.techModeSet = False
if self.contentObj:
self.contentObj.clearAllSet()
if self.connectivityCheckObj:
self.connectivityCheckObj.clearAllSet()
if self.managementObj:
self.managementObj.clearAllSet()
if self.linkObj:
self.linkObj.clearAllSet()
if self.deviceObj:
self.deviceObj.clearAllSet()
def write (self
, interface
, trxContext=None
):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('write').debug3Func(): logFunc('called, PARAMS')
return self._internalWrite(interface, trxContext)
def read (self
, interface
, trxContext=None):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read').debug3Func(): logFunc('called, PARAMS')
return self._internalRead(interface,
False,
trxContext)
def readAllOrFail (self
, interface
, trxContext=None):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read-all-or-fail').debug3Func(): logFunc('called, PARAMS')
return self._internalRead(interface,
True,
trxContext)
def newContent (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-content').debug3Func(): logFunc('called.')
content = BlinkyContentMaapi(self._log)
content.init(self.domain)
return content
def setContentObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-content').debug3Func(): logFunc('called. obj=%s', obj)
self.contentObj = obj
def getContentObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-content').debug3Func(): logFunc('called. self.contentObj=%s', self.contentObj)
return self.contentObj
def hasContent (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-content').debug3Func(): logFunc('called. self.contentObj=%s', self.contentObj)
if self.contentObj:
return True
return False
def newConnectivityCheck (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-connectivitycheck').debug3Func(): logFunc('called.')
connectivityCheck = BlinkyConnectivityCheckMaapi(self._log)
connectivityCheck.init(self.domain)
return connectivityCheck
def setConnectivityCheckObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-connectivitycheck').debug3Func(): logFunc('called. obj=%s', obj)
self.connectivityCheckObj = obj
def getConnectivityCheckObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-connectivitycheck').debug3Func(): logFunc('called. self.connectivityCheckObj=%s', self.connectivityCheckObj)
return self.connectivityCheckObj
def hasConnectivityCheck (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-connectivitycheck').debug3Func(): logFunc('called. self.connectivityCheckObj=%s', self.connectivityCheckObj)
if self.connectivityCheckObj:
return True
return False
def newManagement (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-management').debug3Func(): logFunc('called.')
management = BlinkyManagementMaapi(self._log)
management.init(self.domain)
return management
def setManagementObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-management').debug3Func(): logFunc('called. obj=%s', obj)
self.managementObj = obj
def getManagementObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-management').debug3Func(): logFunc('called. self.managementObj=%s', self.managementObj)
return self.managementObj
def hasManagement (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-management').debug3Func(): logFunc('called. self.managementObj=%s', self.managementObj)
if self.managementObj:
return True
return False
def newLink (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-link').debug3Func(): logFunc('called.')
link = BlinkyLinkMaapi(self._log)
link.init(self.domain)
return link
def setLinkObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-link').debug3Func(): logFunc('called. obj=%s', obj)
self.linkObj = obj
def getLinkObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-link').debug3Func(): logFunc('called. self.linkObj=%s', self.linkObj)
return self.linkObj
def hasLink (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-link').debug3Func(): logFunc('called. self.linkObj=%s', self.linkObj)
if self.linkObj:
return True
return False
def newDevice (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('new-device').debug3Func(): logFunc('called.')
device = BlinkyDeviceMaapi(self._log)
device.init(self.domain)
return device
def setDeviceObj (self, obj):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-device').debug3Func(): logFunc('called. obj=%s', obj)
self.deviceObj = obj
def getDeviceObj (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-device').debug3Func(): logFunc('called. self.deviceObj=%s', self.deviceObj)
return self.deviceObj
def hasDevice (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-device').debug3Func(): logFunc('called. self.deviceObj=%s', self.deviceObj)
if self.deviceObj:
return True
return False
def requestConfigurationDelay (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-configurationdelay').debug3Func(): logFunc('called. requested=%s', requested)
self.configurationDelayRequested = requested
self.configurationDelaySet = False
def isConfigurationDelayRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-configurationdelay-requested').debug3Func(): logFunc('called. requested=%s', self.configurationDelayRequested)
return self.configurationDelayRequested
def getConfigurationDelay (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-configurationdelay').debug3Func(): logFunc('called. self.configurationDelaySet=%s, self.configurationDelay=%s', self.configurationDelaySet, self.configurationDelay)
if self.configurationDelaySet:
return self.configurationDelay
return None
def hasConfigurationDelay (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-configurationdelay').debug3Func(): logFunc('called. self.configurationDelaySet=%s, self.configurationDelay=%s', self.configurationDelaySet, self.configurationDelay)
if self.configurationDelaySet:
return True
return False
def setConfigurationDelay (self, configurationDelay):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-configurationdelay').debug3Func(): logFunc('called. configurationDelay=%s, old=%s', configurationDelay, self.configurationDelay)
self.configurationDelaySet = True
self.configurationDelay = configurationDelay
def requestMuteReporting (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-mutereporting').debug3Func(): logFunc('called. requested=%s', requested)
self.muteReportingRequested = requested
self.muteReportingSet = False
def isMuteReportingRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-mutereporting-requested').debug3Func(): logFunc('called. requested=%s', self.muteReportingRequested)
return self.muteReportingRequested
def getMuteReporting (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-mutereporting').debug3Func(): logFunc('called. self.muteReportingSet=%s, self.muteReporting=%s', self.muteReportingSet, self.muteReporting)
if self.muteReportingSet:
return self.muteReporting
return None
def hasMuteReporting (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-mutereporting').debug3Func(): logFunc('called. self.muteReportingSet=%s, self.muteReporting=%s', self.muteReportingSet, self.muteReporting)
if self.muteReportingSet:
return True
return False
def setMuteReporting (self, muteReporting):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-mutereporting').debug3Func(): logFunc('called. muteReporting=%s, old=%s', muteReporting, self.muteReporting)
self.muteReportingSet = True
self.muteReporting = muteReporting
def requestSendGratuitousArp (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-sendgratuitousarp').debug3Func(): logFunc('called. requested=%s', requested)
self.sendGratuitousArpRequested = requested
self.sendGratuitousArpSet = False
def isSendGratuitousArpRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-sendgratuitousarp-requested').debug3Func(): logFunc('called. requested=%s', self.sendGratuitousArpRequested)
return self.sendGratuitousArpRequested
def getSendGratuitousArp (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-sendgratuitousarp').debug3Func(): logFunc('called. self.sendGratuitousArpSet=%s, self.sendGratuitousArp=%s', self.sendGratuitousArpSet, self.sendGratuitousArp)
if self.sendGratuitousArpSet:
return self.sendGratuitousArp
return None
def hasSendGratuitousArp (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-sendgratuitousarp').debug3Func(): logFunc('called. self.sendGratuitousArpSet=%s, self.sendGratuitousArp=%s', self.sendGratuitousArpSet, self.sendGratuitousArp)
if self.sendGratuitousArpSet:
return True
return False
def setSendGratuitousArp (self, sendGratuitousArp):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-sendgratuitousarp').debug3Func(): logFunc('called. sendGratuitousArp=%s, old=%s', sendGratuitousArp, self.sendGratuitousArp)
self.sendGratuitousArpSet = True
self.sendGratuitousArp = sendGratuitousArp
def requestShutdown (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-shutdown').debug3Func(): logFunc('called. requested=%s', requested)
self.shutdownRequested = requested
self.shutdownSet = False
def isShutdownRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-shutdown-requested').debug3Func(): logFunc('called. requested=%s', self.shutdownRequested)
return self.shutdownRequested
def getShutdown (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-shutdown').debug3Func(): logFunc('called. self.shutdownSet=%s, self.shutdown=%s', self.shutdownSet, self.shutdown)
if self.shutdownSet:
return self.shutdown
return None
def hasShutdown (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-shutdown').debug3Func(): logFunc('called. self.shutdownSet=%s, self.shutdown=%s', self.shutdownSet, self.shutdown)
if self.shutdownSet:
return True
return False
def setShutdown (self, shutdown):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-shutdown').debug3Func(): logFunc('called. shutdown=%s, old=%s', shutdown, self.shutdown)
self.shutdownSet = True
self.shutdown = shutdown
def requestTechMode (self, requested):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('request-techmode').debug3Func(): logFunc('called. requested=%s', requested)
self.techModeRequested = requested
self.techModeSet = False
def isTechModeRequested (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('is-techmode-requested').debug3Func(): logFunc('called. requested=%s', self.techModeRequested)
return self.techModeRequested
def getTechMode (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('get-techmode').debug3Func(): logFunc('called. self.techModeSet=%s, self.techMode=%s', self.techModeSet, self.techMode)
if self.techModeSet:
return self.techMode
return None
def hasTechMode (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('has-techmode').debug3Func(): logFunc('called. self.techModeSet=%s, self.techMode=%s', self.techModeSet, self.techMode)
if self.techModeSet:
return True
return False
def setTechMode (self, techMode):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('set-techmode').debug3Func(): logFunc('called. techMode=%s, old=%s', techMode, self.techMode)
self.techModeSet = True
self.techMode = techMode
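# Illustrative usage sketch (not generated code; names follow the accessors above):
#
#   maapi = BlinkySystemDefaultsMaapi(logger)
#   maapi.init(domain)
#   maapi.requestConfig()              # mark every config leaf as requested
#   if maapi.read(interface) == ReturnCodes.kOk and maapi.hasShutdown():
#       shutdown = maapi.getShutdown()
#
#   maapi.setShutdown(False)           # stage a value, then push it
#   rc = maapi.write(interface)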
def _clearAllReadData (self):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('clear-all-read-data').debug3Func(): logFunc('called')
if self.contentObj:
self.contentObj._clearAllReadData()
if self.connectivityCheckObj:
self.connectivityCheckObj._clearAllReadData()
if self.managementObj:
self.managementObj._clearAllReadData()
if self.linkObj:
self.linkObj._clearAllReadData()
if self.deviceObj:
self.deviceObj._clearAllReadData()
self.configurationDelay = 0
self.configurationDelaySet = False
self.muteReporting = 0
self.muteReportingSet = False
self.sendGratuitousArp = 0
self.sendGratuitousArpSet = False
self.shutdown = 0
self.shutdownSet = False
self.techMode = 0
self.techModeSet = False
def _getSelfKeyPath (self, interface
, junkForTemplate):
for logFunc in self._log('get-self-key-path').debug3Func(): logFunc('called. PARAMS, junkForTemplate=%s', junkForTemplate)
keyPath = KeyPath()
xmlVal = Value()
xmlVal.setXmlTag(("system-defaults", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
ancestorVal = Value()
ancestorVal.setString(interface)
keyPath.addKeyPathPrefix(ancestorVal)
xmlVal = Value()
xmlVal.setXmlTag(("interface", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
xmlVal = Value()
xmlVal.setXmlTag(("interfaces", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if"))
keyPath.addKeyPathPrefix(xmlVal)
xmlVal = Value()
xmlVal.setXmlTag(("tech", "http://qwilt.com/ns/yang/device/tech/qwilt-tech", "qt"))
keyPath.addKeyPathPrefix(xmlVal)
for logFunc in self._log('get-self-key-path-done').debug3Func(): logFunc('done. keyPath=%s. PARAMS', keyPath)
return keyPath
def _internalWrite (self,
interface,
trxContext):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('internal-write').debug3Func(): logFunc('called. PARAMS')
tagValueList = TagValues()
res = self._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('write-fill-write-tag-value-failed').errorFunc(): logFunc('_fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
itemsToDelete = []
res = self._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('write-collect-items-to-delete-failed').errorFunc(): logFunc('_collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
keyPath = self._getSelfKeyPath(interface,
None)
res = self.domain.writeMaapi(tagValueList, keyPath, trxContext, itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('write-domain-failed').errorFunc(): logFunc('domain.writeMaapi() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('internal-write-done').debug3Func(): logFunc('done. PARAMS')
return ReturnCodes.kOk
def _internalRead (self,
interface,
readAllOrFail,
trxContext):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('internal-read').debug3Func(): logFunc('called. PARAMS, readAllOrFail=%s', readAllOrFail)
if readAllOrFail:
self._clearAllReadData()
tagValueList = TagValues()
res = self._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-fill-read-tag-value-failed').errorFunc(): logFunc('_fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
keyPath = self._getSelfKeyPath(interface,
None)
res = self.domain.readMaapi(tagValueList, keyPath, trxContext)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-domain-failed').errorFunc(): logFunc('domain.readMaapi() failed. PARAMS')
return ReturnCodes.kGeneralError
res = self._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-read-tag-values-failed').errorFunc(): logFunc('_readTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('internal-read-done').debug3Func(): logFunc('done. PARAMS, readAllOrFail=%s', readAllOrFail)
return ReturnCodes.kOk
def _collectItemsToDelete (self,
interface,
itemsToDelete):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('collect-items-to-delete').debug3Func(): logFunc('called: itemsToDelete=%s. PARAMS', itemsToDelete)
if self.contentObj:
res = self.contentObj._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-content-failed').errorFunc(): logFunc('contentObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.connectivityCheckObj:
res = self.connectivityCheckObj._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.managementObj:
res = self.managementObj._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-management-failed').errorFunc(): logFunc('managementObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.linkObj:
res = self.linkObj._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-link-failed').errorFunc(): logFunc('linkObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
if self.deviceObj:
res = self.deviceObj._collectItemsToDelete(interface,
itemsToDelete)
if res != ReturnCodes.kOk:
for logFunc in self._log('collect-items-to-delete-device-failed').errorFunc(): logFunc('deviceObj._collectItemsToDelete() failed. PARAMS')
return ReturnCodes.kGeneralError
for logFunc in self._log('collect-items-to-delete-done').debug3Func(): logFunc('done: itemsToDelete=%s. PARAMS', itemsToDelete)
return ReturnCodes.kOk
def _fillWriteTagValues (self, tagValueList):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('fill-write-tag-values').debug3Func(): logFunc('called: tagValueList=%s', tagValueList)
if self.hasConfigurationDelay():
valConfigurationDelay = Value()
if self.configurationDelay is not None:
valConfigurationDelay.setUint64(self.configurationDelay)
else:
valConfigurationDelay.setEmpty()
tagValueList.push(("configuration-delay", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valConfigurationDelay)
if self.hasMuteReporting():
valMuteReporting = Value()
if self.muteReporting is not None:
valMuteReporting.setBool(self.muteReporting)
else:
valMuteReporting.setEmpty()
tagValueList.push(("mute-reporting", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valMuteReporting)
if self.hasSendGratuitousArp():
valSendGratuitousArp = Value()
if self.sendGratuitousArp is not None:
valSendGratuitousArp.setBool(self.sendGratuitousArp)
else:
valSendGratuitousArp.setEmpty()
tagValueList.push(("send-gratuitous-arp", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valSendGratuitousArp)
if self.hasShutdown():
valShutdown = Value()
if self.shutdown is not None:
valShutdown.setBool(self.shutdown)
else:
valShutdown.setEmpty()
tagValueList.push(("shutdown", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valShutdown)
if self.hasTechMode():
valTechMode = Value()
if self.techMode is not None:
valTechMode.setBool(self.techMode)
else:
valTechMode.setEmpty()
tagValueList.push(("tech-mode", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valTechMode)
if self.contentObj:
valBegin = Value()
(tag, ns, prefix) = ("content" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.contentObj._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-content-failed').errorFunc(): logFunc('contentObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to include it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.connectivityCheckObj:
valBegin = Value()
(tag, ns, prefix) = ("connectivity-check" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.connectivityCheckObj._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.managementObj:
valBegin = Value()
(tag, ns, prefix) = ("management" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.managementObj._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-management-failed').errorFunc(): logFunc('managementObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.linkObj:
valBegin = Value()
(tag, ns, prefix) = ("link" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.linkObj._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-link-failed').errorFunc(): logFunc('linkObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.deviceObj:
valBegin = Value()
(tag, ns, prefix) = ("device" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.deviceObj._fillWriteTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-write-tag-values-device-failed').errorFunc(): logFunc('deviceObj._fillWriteTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
return ReturnCodes.kOk
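# Prepares a read request: pushes an empty placeholder Value for every
# requested leaf, and recurses into each present descendant, dropping the
# XmlBegin wrapper again when the descendant added no tags of its own.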
def _fillReadTagValues (self, tagValueList):
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('fill-read-tag-values').debug3Func(): logFunc('called: tagValueList=%s', tagValueList)
if self.isConfigurationDelayRequested():
valConfigurationDelay = Value()
valConfigurationDelay.setEmpty()
tagValueList.push(("configuration-delay", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valConfigurationDelay)
if self.isMuteReportingRequested():
valMuteReporting = Value()
valMuteReporting.setEmpty()
tagValueList.push(("mute-reporting", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valMuteReporting)
if self.isSendGratuitousArpRequested():
valSendGratuitousArp = Value()
valSendGratuitousArp.setEmpty()
tagValueList.push(("send-gratuitous-arp", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valSendGratuitousArp)
if self.isShutdownRequested():
valShutdown = Value()
valShutdown.setEmpty()
tagValueList.push(("shutdown", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valShutdown)
if self.isTechModeRequested():
valTechMode = Value()
valTechMode.setEmpty()
tagValueList.push(("tech-mode", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"), valTechMode)
if self.contentObj:
valBegin = Value()
(tag, ns, prefix) = ("content" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.contentObj._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-content-failed').errorFunc(): logFunc('contentObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.connectivityCheckObj:
valBegin = Value()
(tag, ns, prefix) = ("connectivity-check" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.connectivityCheckObj._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.managementObj:
valBegin = Value()
(tag, ns, prefix) = ("management" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.managementObj._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-management-failed').errorFunc(): logFunc('managementObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.linkObj:
valBegin = Value()
(tag, ns, prefix) = ("link" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.linkObj._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-link-failed').errorFunc(): logFunc('linkObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
if self.deviceObj:
valBegin = Value()
(tag, ns, prefix) = ("device" , "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", "qt-if")
valBegin.setXmlBegin((tag, ns, prefix))
tagValueList.push((tag, ns), valBegin)
tagValueListLen = tagValueList.getLen()
res = self.deviceObj._fillReadTagValues(tagValueList)
if res != ReturnCodes.kOk:
for logFunc in self._log('fill-read-tag-values-device-failed').errorFunc(): logFunc('deviceObj._fillReadTagValues() failed. PARAMS')
return ReturnCodes.kGeneralError
if tagValueList.getLen() == tagValueListLen:
# descendant didn't add anything, no need to read it.
tagValueList.pop()
else:
valEnd = Value()
valEnd.setXmlEnd((tag, ns, prefix))
tagValueList.push((tag, ns), valEnd)
return ReturnCodes.kOk
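# Consumes a read reply: pops tag-value pairs in the same order that
# _fillReadTagValues() pushed them, validating tag and namespace for every
# leaf and descendant; on a tag mismatch (or, when readAllOrFail is set, any
# unreadable value) all partially read data is cleared and an error returned.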
def _readTagValues (self, tagValueList, readAllOrFail):
__pychecker__ = 'maxlines=300 maxreturns=30'
self.myInitGuard.isInitOrCrash()
for logFunc in self._log('read-tag-values').debug3Func(): logFunc('called. readAllOrFail=%s, tagValueList=%s', readAllOrFail, tagValueList)
res = ReturnCodes.kOk
for logFunc in self._log('read-tag-values-leaves').debug3Func(): logFunc('reading leaves. tagValueList=%s', tagValueList)
if self.isConfigurationDelayRequested():
((tag, ns), tempValue) = tagValueList.popFront()
if (tag != "configuration-delay") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"):
for logFunc in self._log('read-tag-values-unexpected-tag-leaf-configurationdelay').errorFunc(): logFunc('got unexpected tag-value for leaf: %s. expected: (%s, %s), got: (%s, %s)',
"configurationDelay", "configuration-delay", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", tag, ns)
self._clearAllReadData()
return ReturnCodes.kGeneralError
tempVar = tempValue.asUint64()
if res != ReturnCodes.kOk or tempVar is None:
for logFunc in self._log('read-tag-values-configuration-delay-bad-value').infoFunc(): logFunc('configurationDelay not read')
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
if tempVar is not None:
self.setConfigurationDelay(tempVar)
for logFunc in self._log('read-tag-values-configuration-delay').debug3Func(): logFunc('read configurationDelay. configurationDelay=%s, tempValue=%s', self.configurationDelay, tempValue.getType())
if self.isMuteReportingRequested():
((tag, ns), tempValue) = tagValueList.popFront()
if (tag != "mute-reporting") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"):
for logFunc in self._log('read-tag-values-unexpected-tag-leaf-mutereporting').errorFunc(): logFunc('got unexpected tag-value for leaf: %s. expected: (%s, %s), got: (%s, %s)',
"muteReporting", "mute-reporting", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", tag, ns)
self._clearAllReadData()
return ReturnCodes.kGeneralError
tempVar = tempValue.asBool()
if res != ReturnCodes.kOk or tempVar is None:
for logFunc in self._log('read-tag-values-mute-reporting-bad-value').infoFunc(): logFunc('muteReporting not read')
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
if tempVar is not None:
self.setMuteReporting(tempVar)
for logFunc in self._log('read-tag-values-mute-reporting').debug3Func(): logFunc('read muteReporting. muteReporting=%s, tempValue=%s', self.muteReporting, tempValue.getType())
if self.isSendGratuitousArpRequested():
((tag, ns), tempValue) = tagValueList.popFront()
if (tag != "send-gratuitous-arp") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"):
for logFunc in self._log('read-tag-values-unexpected-tag-leaf-sendgratuitousarp').errorFunc(): logFunc('got unexpected tag-value for leaf: %s. expected: (%s, %s), got: (%s, %s)',
"sendGratuitousArp", "send-gratuitous-arp", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", tag, ns)
self._clearAllReadData()
return ReturnCodes.kGeneralError
tempVar = tempValue.asBool()
if res != ReturnCodes.kOk or tempVar is None:
for logFunc in self._log('read-tag-values-send-gratuitous-arp-bad-value').infoFunc(): logFunc('sendGratuitousArp not read')
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
if tempVar is not None:
self.setSendGratuitousArp(tempVar)
for logFunc in self._log('read-tag-values-send-gratuitous-arp').debug3Func(): logFunc('read sendGratuitousArp. sendGratuitousArp=%s, tempValue=%s', self.sendGratuitousArp, tempValue.getType())
if self.isShutdownRequested():
((tag, ns), tempValue) = tagValueList.popFront()
if (tag != "shutdown") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"):
for logFunc in self._log('read-tag-values-unexpected-tag-leaf-shutdown').errorFunc(): logFunc('got unexpected tag-value for leaf: %s. expected: (%s, %s), got: (%s, %s)',
"shutdown", "shutdown", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", tag, ns)
self._clearAllReadData()
return ReturnCodes.kGeneralError
tempVar = tempValue.asBool()
if res != ReturnCodes.kOk or tempVar is None:
for logFunc in self._log('read-tag-values-shutdown-bad-value').infoFunc(): logFunc('shutdown not read')
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
if tempVar is not None:
self.setShutdown(tempVar)
for logFunc in self._log('read-tag-values-shutdown').debug3Func(): logFunc('read shutdown. shutdown=%s, tempValue=%s', self.shutdown, tempValue.getType())
if self.isTechModeRequested():
((tag, ns), tempValue) = tagValueList.popFront()
if (tag != "tech-mode") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"):
for logFunc in self._log('read-tag-values-unexpected-tag-leaf-techmode').errorFunc(): logFunc('got unexpected tag-value for leaf: %s. expected: (%s, %s), got: (%s, %s)',
"techMode", "tech-mode", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", tag, ns)
self._clearAllReadData()
return ReturnCodes.kGeneralError
tempVar = tempValue.asBool()
if res != ReturnCodes.kOk or tempVar is None:
for logFunc in self._log('read-tag-values-tech-mode-bad-value').infoFunc(): logFunc('techMode not read')
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
if tempVar is not None:
self.setTechMode(tempVar)
for logFunc in self._log('read-tag-values-tech-mode').debug3Func(): logFunc('read techMode. techMode=%s, tempValue=%s', self.techMode, tempValue.getType())
if self.contentObj:
((tag, ns), valBegin) = tagValueList.popFront()
if (tag != "content") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valBegin.getType() != Value.kXmlBegin):
for logFunc in self._log('read-tag-values-unexpected-tag-begin').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"content", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlBegin,
tag, ns, valBegin.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
res = self.contentObj._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-tag-values-content-failed').errorFunc(): logFunc('contentObj._readTagValues() failed. tagValueList=%s', tagValueList)
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
((tag, ns), valEnd) = tagValueList.popFront()
if (tag != "content") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valEnd.getType() != Value.kXmlEnd):
for logFunc in self._log('read-tag-values-unexpected-tag-end').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"content", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlEnd,
tag, ns, valEnd.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
if self.connectivityCheckObj:
((tag, ns), valBegin) = tagValueList.popFront()
if (tag != "connectivity-check") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valBegin.getType() != Value.kXmlBegin):
for logFunc in self._log('read-tag-values-unexpected-tag-begin').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"connectivity-check", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlBegin,
tag, ns, valBegin.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
res = self.connectivityCheckObj._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-tag-values-connectivity-check-failed').errorFunc(): logFunc('connectivityCheckObj._readTagValues() failed. tagValueList=%s', tagValueList)
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
((tag, ns), valEnd) = tagValueList.popFront()
if (tag != "connectivity-check") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valEnd.getType() != Value.kXmlEnd):
for logFunc in self._log('read-tag-values-unexpected-tag-end').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"connectivity-check", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlEnd,
tag, ns, valEnd.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
if self.managementObj:
((tag, ns), valBegin) = tagValueList.popFront()
if (tag != "management") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valBegin.getType() != Value.kXmlBegin):
for logFunc in self._log('read-tag-values-unexpected-tag-begin').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"management", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlBegin,
tag, ns, valBegin.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
res = self.managementObj._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-tag-values-management-failed').errorFunc(): logFunc('managementObj._readTagValues() failed. tagValueList=%s', tagValueList)
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
((tag, ns), valEnd) = tagValueList.popFront()
if (tag != "management") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valEnd.getType() != Value.kXmlEnd):
for logFunc in self._log('read-tag-values-unexpected-tag-end').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"management", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlEnd,
tag, ns, valEnd.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
if self.linkObj:
((tag, ns), valBegin) = tagValueList.popFront()
if (tag != "link") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valBegin.getType() != Value.kXmlBegin):
for logFunc in self._log('read-tag-values-unexpected-tag-begin').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"link", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlBegin,
tag, ns, valBegin.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
res = self.linkObj._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-tag-values-link-failed').errorFunc(): logFunc('linkObj._readTagValues() failed. tagValueList=%s', tagValueList)
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
((tag, ns), valEnd) = tagValueList.popFront()
if (tag != "link") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valEnd.getType() != Value.kXmlEnd):
for logFunc in self._log('read-tag-values-unexpected-tag-end').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"link", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlEnd,
tag, ns, valEnd.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
if self.deviceObj:
((tag, ns), valBegin) = tagValueList.popFront()
if (tag != "device") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valBegin.getType() != Value.kXmlBegin):
for logFunc in self._log('read-tag-values-unexpected-tag-begin').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"device", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlBegin,
tag, ns, valBegin.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
res = self.deviceObj._readTagValues(tagValueList, readAllOrFail)
if res != ReturnCodes.kOk:
for logFunc in self._log('read-tag-values-device-failed').errorFunc(): logFunc('deviceObj._readTagValues() failed. tagValueList=%s', tagValueList)
if readAllOrFail:
self._clearAllReadData()
return ReturnCodes.kGeneralError
((tag, ns), valEnd) = tagValueList.popFront()
if (tag != "device") or \
(ns != "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces") or \
(valEnd.getType() != Value.kXmlEnd):
for logFunc in self._log('read-tag-values-unexpected-tag-end').errorFunc(): logFunc('got unexpected tag-value. expected: (%s, %s, type=%s), got: (%s, %s, type=%s)',
"device", "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces", Value.kXmlEnd,
tag, ns, valEnd.getType())
self._clearAllReadData()
return ReturnCodes.kGeneralError
for logFunc in self._log('read-tag-values-done').debug3Func(): logFunc('done. readAllOrFail=%s, tagValueList=%s', readAllOrFail, tagValueList)
return ReturnCodes.kOk
"""
Extracted from the below data:
{
"node": {
"name": "systemDefaults",
"namespace": "system_defaults",
"className": "SystemDefaultsMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.system_defaults_maapi_gen import SystemDefaultsMaapi",
"baseClassName": "SystemDefaultsMaapiBase",
"baseModule": "system_defaults_maapi_base_gen"
},
"ancestors": [
{
"moduleYangNamespacePrefix": "qt",
"yangName": "tech",
"namespace": "tech",
"isCurrent": false,
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech",
"name": "tech"
},
{
"moduleYangNamespacePrefix": "qt-if",
"yangName": "interfaces",
"namespace": "interfaces",
"isCurrent": false,
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"name": "interfaces"
},
{
"moduleYangNamespacePrefix": "qt-if",
"isCurrent": false,
"yangName": "interface",
"namespace": "interface",
"isList": true,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"keyLeaf": {
"varName": "interface",
"defaultVal": null,
"typeHandler": "handler: StringHandler"
},
"name": "interface"
},
{
"moduleYangNamespacePrefix": "qt-if",
"yangName": "system-defaults",
"namespace": "system_defaults",
"isCurrent": true,
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"name": "system-defaults"
}
],
"descendants": [
{
"moduleYangNamespacePrefix": "qt-if",
"memberName": "content",
"yangName": "content",
"className": "BlinkyContentMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.content.content_maapi_gen import BlinkyContentMaapi",
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"
},
{
"moduleYangNamespacePrefix": "qt-if",
"memberName": "connectivityCheck",
"yangName": "connectivity-check",
"className": "BlinkyConnectivityCheckMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.connectivity_check.connectivity_check_maapi_gen import BlinkyConnectivityCheckMaapi",
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"
},
{
"moduleYangNamespacePrefix": "qt-if",
"memberName": "management",
"yangName": "management",
"className": "BlinkyManagementMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.management.management_maapi_gen import BlinkyManagementMaapi",
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"
},
{
"moduleYangNamespacePrefix": "qt-if",
"memberName": "link",
"yangName": "link",
"className": "BlinkyLinkMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.link.link_maapi_gen import BlinkyLinkMaapi",
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"
},
{
"moduleYangNamespacePrefix": "qt-if",
"memberName": "device",
"yangName": "device",
"className": "BlinkyDeviceMaapi",
"importStatement": "from a.api.yang.modules.tech.common.qwilt_tech_interfaces.tech.interfaces.interface.system_defaults.device.device_maapi_gen import BlinkyDeviceMaapi",
"isList": false,
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces"
}
],
"conditionalDebugName": null,
"operLeaves": [],
"module": {},
"configLeaves": [
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: IntHandler",
"memberName": "configurationDelay",
"yangName": "configuration-delay",
"object": "",
"leafrefPath": null,
"defaultVal": "0",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "muteReporting",
"yangName": "mute-reporting",
"object": "",
"leafrefPath": null,
"defaultVal": "false",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "sendGratuitousArp",
"yangName": "send-gratuitous-arp",
"object": "",
"leafrefPath": null,
"defaultVal": "true",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "shutdown",
"yangName": "shutdown",
"object": "",
"leafrefPath": null,
"defaultVal": "true",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "techMode",
"yangName": "tech-mode",
"object": "",
"leafrefPath": null,
"defaultVal": "false",
"hasDefaultRef": false
}
],
"env": {
"namespaces": [
"a",
"api",
"yang",
"modules",
"tech",
"common",
"qwilt_tech_interfaces"
]
},
"leaves": [
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: IntHandler",
"memberName": "configurationDelay",
"yangName": "configuration-delay",
"object": "",
"leafrefPath": null,
"defaultVal": "0",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "muteReporting",
"yangName": "mute-reporting",
"object": "",
"leafrefPath": null,
"defaultVal": "false",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "sendGratuitousArp",
"yangName": "send-gratuitous-arp",
"object": "",
"leafrefPath": null,
"defaultVal": "true",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "shutdown",
"yangName": "shutdown",
"object": "",
"leafrefPath": null,
"defaultVal": "true",
"hasDefaultRef": false
},
{
"moduleYangNamespace": "http://qwilt.com/ns/yang/device/tech/qwilt-tech-interfaces",
"moduleYangNamespacePrefix": "qt-if",
"typeHandler": "handler: BoolPyHandler",
"memberName": "techMode",
"yangName": "tech-mode",
"object": "",
"leafrefPath": null,
"defaultVal": "false",
"hasDefaultRef": false
}
],
"createTime": "2013"
}
"""
6691161,兵庫県,神戸市北区,道場町生野
6600000,兵庫県,尼崎市,
6610981,兵庫県,尼崎市,猪名寺
6600064,兵庫県,尼崎市,稲葉荘
6600055,兵庫県,尼崎市,稲葉元町
6600812,兵庫県,尼崎市,今福
6600096,兵庫県,尼崎市,扇町
6600076,兵庫県,尼崎市,大島
6600072,兵庫県,尼崎市,大庄川田町
6600075,兵庫県,尼崎市,大庄中通
6600077,兵庫県,尼崎市,大庄西町
6600063,兵庫県,尼崎市,大庄北
6600842,兵庫県,尼崎市,大高洲町
6610023,兵庫県,尼崎市,大西町
6600095,兵庫県,尼崎市,大浜町
6610022,兵庫県,尼崎市,尾浜町
6600862,兵庫県,尼崎市,開明町
6600821,兵庫県,尼崎市,梶ケ島
6610979,兵庫県,尼崎市,上坂部
6610014,兵庫県,尼崎市,上ノ島町
6610971,兵庫県,尼崎市,瓦宮
6610964,兵庫県,尼崎市,神崎町
6600884,兵庫県,尼崎市,神田中通
6600885,兵庫県,尼崎市,神田南通
6600883,兵庫県,尼崎市,神田北通
6600826,兵庫県,尼崎市,北城内
6600878,兵庫県,尼崎市,北竹谷町
6600804,兵庫県,尼崎市,北大物町
6600834,兵庫県,尼崎市,北初島町
6600806,兵庫県,尼崎市,金楽寺町
6600813,兵庫県,尼崎市,杭瀬寺島
6600814,兵庫県,尼崎市,杭瀬本町
6600822,兵庫県,尼崎市,杭瀬南新町
6600815,兵庫県,尼崎市,杭瀬北新町
6610977,兵庫県,尼崎市,久々知
6610978,兵庫県,尼崎市,久々知西町
6610983,兵庫県,尼崎市,口田中
6610013,兵庫県,尼崎市,栗山町
6600873,兵庫県,尼崎市,玄番南之町
6600872,兵庫県,尼崎市,玄番北之町
6610982,兵庫県,尼崎市,食満
6600074,兵庫県,尼崎市,琴浦町
6610972,兵庫県,尼崎市,小中島
6610024,兵庫県,尼崎市,三反田町
6600808,兵庫県,尼崎市,潮江
6610976,兵庫県,尼崎市,潮江
6600866,兵庫県,尼崎市,汐町
6610952,兵庫県,尼崎市,椎堂
6610975,兵庫県,尼崎市,下坂部
6600811,兵庫県,尼崎市,常光寺
6600881,兵庫県,尼崎市,昭和通
6600882,兵庫県,尼崎市,昭和南通
6600082,兵庫県,尼崎市,水明町
6600094,兵庫県,尼崎市,末広町
6600071,兵庫県,尼崎市,崇徳院
6610973,兵庫県,尼崎市,善法寺町
6600823,兵庫県,尼崎市,大物町
6610963,兵庫県,尼崎市,高田町
6600876,兵庫県,尼崎市,竹谷町
6610025,兵庫県,尼崎市,立花町
6600871,兵庫県,尼崎市,建家町
6610951,兵庫県,尼崎市,田能
6610002,兵庫県,尼崎市,塚口町
6610001,兵庫県,尼崎市,塚口本町
6600858,兵庫県,尼崎市,築地
6610965,兵庫県,尼崎市,次屋
6610046,兵庫県,尼崎市,常松
6610042,兵庫県,尼崎市,常吉
6600092,兵庫県,尼崎市,鶴町
6600867,兵庫県,尼崎市,寺町
6600083,兵庫県,尼崎市,道意町
6610961,兵庫県,尼崎市,戸ノ内町
6610003,兵庫県,尼崎市,富松町
6600851,兵庫県,尼崎市,中在家町
6600091,兵庫県,尼崎市,中浜町
6600802,兵庫県,尼崎市,長洲中通
6600801,兵庫県,尼崎市,長洲東通
6600807,兵庫県,尼崎市,長洲西通
6600803,兵庫県,尼崎市,長洲本通
6600073,兵庫県,尼崎市,菜切山町
6610974,兵庫県,尼崎市,若王寺
6600052,兵庫県,尼崎市,七松町
6600093,兵庫県,尼崎市,西海岸町
6610966,兵庫県,尼崎市,西川
6610047,兵庫県,尼崎市,西昆陽
6600865,兵庫県,尼崎市,西桜木町
6600845,兵庫県,尼崎市,西高洲町
6600054,兵庫県,尼崎市,西立花町
6600827,兵庫県,尼崎市,西大物町
6600805,兵庫県,尼崎市,西長洲町
6600893,兵庫県,尼崎市,西難波町
6600874,兵庫県,尼崎市,西本町
6600863,兵庫県,尼崎市,西本町北通
6600837,兵庫県,尼崎市,西松島町
6600868,兵庫県,尼崎市,西御園町
6600857,兵庫県,尼崎市,西向島町
6610962,兵庫県,尼崎市,額田町
6610967,兵庫県,尼崎市,浜
6600062,兵庫県,尼崎市,浜田町
6600843,兵庫県,尼崎市,東海岸町
6600864,兵庫県,尼崎市,東桜木町
6610953,兵庫県,尼崎市,東園田町
6600841,兵庫県,尼崎市,東高洲町
6600828,兵庫県,尼崎市,東大物町
6610011,兵庫県,尼崎市,東塚口町
6600051,兵庫県,尼崎市,東七松町
6600892,兵庫県,尼崎市,東難波町
6600832,兵庫県,尼崎市,東初島町
6600844,兵庫県,尼崎市,東浜町
6600824,兵庫県,尼崎市,東本町
6600831,兵庫県,尼崎市,東松島町
6600835,兵庫県,尼崎市,東向島東之町
6600856,兵庫県,尼崎市,東向島西之町
6600891,兵庫県,尼崎市,扶桑町
6600846,兵庫県,尼崎市,船出
6600087,兵庫県,尼崎市,平左衛門町
6600086,兵庫県,尼崎市,丸島町
6610026,兵庫県,尼崎市,水堂町
6610984,兵庫県,尼崎市,御園
6600861,兵庫県,尼崎市,御園町
6610985,兵庫県,尼崎市,南清水
6600825,兵庫県,尼崎市,南城内
6600875,兵庫県,尼崎市,南竹谷町
6610012,兵庫県,尼崎市,南塚口町
6600053,兵庫県,尼崎市,南七松町
6600833,兵庫県,尼崎市,南初島町
6610033,兵庫県,尼崎市,南武庫之荘
6600877,兵庫県,尼崎市,宮内町
6600084,兵庫県,尼崎市,武庫川町
6610044,兵庫県,尼崎市,武庫町
6610041,兵庫県,尼崎市,武庫の里
6610035,兵庫県,尼崎市,武庫之荘
6610031,兵庫県,尼崎市,武庫之荘本町
6610032,兵庫県,尼崎市,武庫之荘東
6610034,兵庫県,尼崎市,武庫之荘西
6610043,兵庫県,尼崎市,武庫元町
6610045,兵庫県,尼崎市,武庫豊町
6610021,兵庫県,尼崎市,名神町
6600085,兵庫県,尼崎市,元浜町
6610970,兵庫県,尼崎市,弥生ケ丘町
6600061,兵庫県,尼崎市,蓬川荘園
6600081,兵庫県,尼崎市,蓬川町
6620000,兵庫県,西宮市,
6620063,兵庫県,西宮市,相生町
6620862,兵庫県,西宮市,青木町
6691122,兵庫県,西宮市,青葉台
6638186,兵庫県,西宮市,上鳴尾町
6620925,兵庫県,西宮市,朝凪町
6620842,兵庫県,西宮市,芦原町
6620871,兵庫県,西宮市,愛宕山
6620946,兵庫県,西宮市,荒戎町
6638025,兵庫県,西宮市,荒木町
6620911,兵庫県,西宮市,池田町
6638137,兵庫県,西宮市,池開町
6620928,兵庫県,西宮市,石在町
6620074,兵庫県,西宮市,石刎町
6620932,兵庫県,西宮市,泉町
6620873,兵庫県,西宮市,一ケ谷町
6620975,兵庫県,西宮市,市庭町
6638002,兵庫県,西宮市,一里山町
6620972,兵庫県,西宮市,今在家町
6638214,兵庫県,西宮市,今津曙町
6638213,兵庫県,西宮市,今津上野町
6638221,兵庫県,西宮市,今津大東町
6638222,兵庫県,西宮市,今津久寿川町
6638229,兵庫県,西宮市,今津社前町
6638223,兵庫県,西宮市,今津巽町
6638227,兵庫県,西宮市,今津出在家町
6638225,兵庫県,西宮市,今津西浜町
6638212,兵庫県,西宮市,今津野田町
6638228,兵庫県,西宮市,今津二葉町
6638224,兵庫県,西宮市,今津真砂町
6638215,兵庫県,西宮市,今津水波町
6638226,兵庫県,西宮市,今津港町
6638211,兵庫県,西宮市,今津山中町
6620886,兵庫県,西宮市,上ケ原山田町
6620885,兵庫県,西宮市,上ケ原山手町
6620891,兵庫県,西宮市,上ケ原一番町
6620892,兵庫県,西宮市,上ケ原二番町
6620893,兵庫県,西宮市,上ケ原三番町
6620894,兵庫県,西宮市,上ケ原四番町
6620895,兵庫県,西宮市,上ケ原五番町
6620896,兵庫県,西宮市,上ケ原六番町
6620881,兵庫県,西宮市,上ケ原七番町
6620882,兵庫県,西宮市,上ケ原八番町
6620883,兵庫県,西宮市,上ケ原九番町
6620884,兵庫県,西宮市,上ケ原十番町
6638134,兵庫県,西宮市,上田中町
6638133,兵庫県,西宮市,上田東町
6638135,兵庫県,西宮市,上田西町
6620855,兵庫県,西宮市,江上町
6638143,兵庫県,西宮市,枝川町
6620085,兵庫県,西宮市,老松町
6620036,兵庫県,西宮市,大井手町
6638017,兵庫県,西宮市,大島町
6620054,兵庫県,西宮市,大谷町
6620836,兵庫県,西宮市,大畑町
6620957,兵庫県,西宮市,大浜町
6638023,兵庫県,西宮市,大森町
6638106,兵庫県,西宮市,大屋町
6620827,兵庫県,西宮市,岡田山
6620022,兵庫県,西宮市,奥畑
6620961,兵庫県,西宮市,御茶家所町
6638182,兵庫県,西宮市,学文殿町
6620977,兵庫県,西宮市,神楽町
6638136,兵庫県,西宮市,笠屋町
6620052,兵庫県,西宮市,霞町
6620001,兵庫県,西宮市,甲山町
6638003,兵庫県,西宮市,上大市
6620865,兵庫県,西宮市,神垣町
6638114,兵庫県,西宮市,上甲子園
6620813,兵庫県,西宮市,上甲東園
6620027,兵庫県,西宮市,神園町
6638021,兵庫県,西宮市,上之町
6620954,兵庫県,西宮市,上葭原町
6620097,兵庫県,西宮市,柏堂町
6620098,兵庫県,西宮市,柏堂西町
6620944,兵庫県,西宮市,川添町
6620951,兵庫県,西宮市,川西町
6620861,兵庫県,西宮市,河原町
6620945,兵庫県,西宮市,川東町
6638107,兵庫県,西宮市,瓦林町
6620823,兵庫県,西宮市,神呪町
6620021,兵庫県,西宮市,神原
6620078,兵庫県,西宮市,菊谷町
6620062,兵庫県,西宮市,木津山町
6638035,兵庫県,西宮市,北口町
6620833,兵庫県,西宮市,北昭和町
6620025,兵庫県,西宮市,北名次町
6620091,兵庫県,西宮市,北山町
6691131,兵庫県,西宮市,清瀬台
6620077,兵庫県,西宮市,久出ケ谷町
6691135,兵庫県,西宮市,国見台
6620927,兵庫県,西宮市,久保町
6638103,兵庫県,西宮市,熊野町
6620064,兵庫県,西宮市,雲井町
6620926,兵庫県,西宮市,鞍掛町
6620083,兵庫県,西宮市,苦楽園一番町
6620082,兵庫県,西宮市,苦楽園二番町
6620081,兵庫県,西宮市,苦楽園三番町
6620088,兵庫県,西宮市,苦楽園四番町
6620087,兵庫県,西宮市,苦楽園五番町
6620086,兵庫県,西宮市,苦楽園六番町
6620037,兵庫県,西宮市,結善町
6620099,兵庫県,西宮市,剣谷町
6638156,兵庫県,西宮市,甲子園網引町
6638165,兵庫県,西宮市,甲子園浦風町
6638151,兵庫県,西宮市,甲子園洲鳥町
6638162,兵庫県,西宮市,甲子園砂田町
6638166,兵庫県,西宮市,甲子園高潮町
6638167,兵庫県,西宮市,甲子園浜田町
6638161,兵庫県,西宮市,甲子園春風町
6638163,兵庫県,西宮市,甲子園三保町
6638164,兵庫県,西宮市,甲子園六石町
6638171,兵庫県,西宮市,甲子園一番町
6638172,兵庫県,西宮市,甲子園二番町
6638173,兵庫県,西宮市,甲子園三番町
6638174,兵庫県,西宮市,甲子園四番町
6638175,兵庫県,西宮市,甲子園五番町
6638176,兵庫県,西宮市,甲子園六番町
6638177,兵庫県,西宮市,甲子園七番町
6638178,兵庫県,西宮市,甲子園八番町
6638179,兵庫県,西宮市,甲子園九番町
6638113,兵庫県,西宮市,甲子園口
6638112,兵庫県,西宮市,甲子園口北町
6638152,兵庫県,西宮市,甲子園町
6638155,兵庫県,西宮市,甲子園浜
6620812,兵庫県,西宮市,甲東園
6620832,兵庫県,西宮市,甲風園
6620965,兵庫県,西宮市,郷免町
6620018,兵庫県,西宮市,甲陽園山王町
6620012,兵庫県,西宮市,甲陽園東山町
6620017,兵庫県,西宮市,甲陽園西山町
6620014,兵庫県,西宮市,甲陽園日之出町
6620015,兵庫県,西宮市,甲陽園本庄町
6620011,兵庫県,西宮市,甲陽園目神山町
6620016,兵庫県,西宮市,甲陽園若江町
6620092,兵庫県,西宮市,甑岩町
6620006,兵庫県,西宮市,越水社家郷山
6620864,兵庫県,西宮市,越水町
6638122,兵庫県,西宮市,小曽根町
6620047,兵庫県,西宮市,寿町
6638123,兵庫県,西宮市,小松東町
6638125,兵庫県,西宮市,小松西町
6638124,兵庫県,西宮市,小松南町
6638126,兵庫県,西宮市,小松北町
6638127,兵庫県,西宮市,小松町
6620844,兵庫県,西宮市,西福町
6620032,兵庫県,西宮市,桜谷町
6620071,兵庫県,西宮市,桜町
6620875,兵庫県,西宮市,五月ケ丘
6638183,兵庫県,西宮市,里中町
6620978,兵庫県,西宮市,産所町
6691101,兵庫県,西宮市,塩瀬町生瀬
6691251,兵庫県,西宮市,塩瀬町名塩
6691141,兵庫県,西宮市,塩瀬町名塩
6620026,兵庫県,西宮市,獅子ケ口町
6620033,兵庫県,西宮市,清水町
6638004,兵庫県,西宮市,下大市東町
6638005,兵庫県,西宮市,下大市西町
6620956,兵庫県,西宮市,下葭原町
6620974,兵庫県,西宮市,社家町
6620004,兵庫県,西宮市,鷲林寺
6620003,兵庫県,西宮市,鷲林寺町
6620002,兵庫県,西宮市,鷲林寺南町
6620856,兵庫県,西宮市,城ケ堀町
6620822,兵庫県,西宮市,松籟荘
6620023,兵庫県,西宮市,城山
6620843,兵庫県,西宮市,神祇官町
6620013,兵庫県,西宮市,新甲陽町
6620845,兵庫県,西宮市,神明町
6620041,兵庫県,西宮市,末広町
6620096,兵庫県,西宮市,角石町
6620913,兵庫県,西宮市,染殿町
6620867,兵庫県,西宮市,大社町
6638033,兵庫県,西宮市,高木東町
6638032,兵庫県,西宮市,高木西町
6620872,兵庫県,西宮市,高座町
6638141,兵庫県,西宮市,高須町
6620066,兵庫県,西宮市,高塚町
6638202,兵庫県,西宮市,高畑町
6638204,兵庫県,西宮市,高松町
6638201,兵庫県,西宮市,田代町
6638001,兵庫県,西宮市,田近野町
6620943,兵庫県,西宮市,建石町
6620973,兵庫県,西宮市,田中町
6638006,兵庫県,西宮市,段上町
6620046,兵庫県,西宮市,千歳町
6620853,兵庫県,西宮市,津田町
6638012,兵庫県,西宮市,堤町
6638244,兵庫県,西宮市,津門綾羽町
6638242,兵庫県,西宮市,津門飯田町
6638247,兵庫県,西宮市,津門稲荷町
6638243,兵庫県,西宮市,津門大箇町
6638241,兵庫県,西宮市,津門大塚町
6638245,兵庫県,西宮市,津門呉羽町
6638234,兵庫県,西宮市,津門住江町
6638231,兵庫県,西宮市,津門西口町
6638246,兵庫県,西宮市,津門仁辺町
6638232,兵庫県,西宮市,津門宝津町
6638233,兵庫県,西宮市,津門川町
6638104,兵庫県,西宮市,天道町
6620043,兵庫県,西宮市,常磐町
6638121,兵庫県,西宮市,戸崎町
6620916,兵庫県,西宮市,戸田町
6620065,兵庫県,西宮市,殿山町
6638105,兵庫県,西宮市,中島町
6620851,兵庫県,西宮市,中須佐町
6620852,兵庫県,西宮市,中殿町
6620952,兵庫県,西宮市,中浜町
6620857,兵庫県,西宮市,中前田町
6620868,兵庫県,西宮市,中屋町
6620955,兵庫県,西宮市,中葭原町
6638034,兵庫県,西宮市,長田町
6691147,兵庫県,西宮市,名塩
6691149,兵庫県,西宮市,名塩赤坂
6691143,兵庫県,西宮市,名塩ガーデン
6691136,兵庫県,西宮市,名塩木之元
6691146,兵庫県,西宮市,名塩さくら台
6691142,兵庫県,西宮市,名塩山荘
6691144,兵庫県,西宮市,名塩茶園町
6691148,兵庫県,西宮市,名塩東久保
6691145,兵庫県,西宮市,名塩平成台
6691132,兵庫県,西宮市,名塩南台
6691162,兵庫県,西宮市,名塩美山
6691134,兵庫県,西宮市,名塩新町
6620024,兵庫県,西宮市,名次町
6691103,兵庫県,西宮市,生瀬東町
6691111,兵庫県,西宮市,生瀬高台
6691102,兵庫県,西宮市,生瀬町
6691104,兵庫県,西宮市,生瀬武庫川町
6638184,兵庫県,西宮市,鳴尾町
6638142,兵庫県,西宮市,鳴尾浜
6620038,兵庫県,西宮市,南郷町
6620814,兵庫県,西宮市,仁川五ケ山町
6620815,兵庫県,西宮市,仁川百合野町
6620811,兵庫県,西宮市,仁川町
6620034,兵庫県,西宮市,西田町
6620934,兵庫県,西宮市,西宮浜
6620933,兵庫県,西宮市,西波止町
6620093,兵庫県,西宮市,西平町
6620838,兵庫県,西宮市,能登町
6638015,兵庫県,西宮市,野間町
6620051,兵庫県,西宮市,羽衣町
6620854,兵庫県,西宮市,櫨塚町
6638187,兵庫県,西宮市,花園町
6691121,兵庫県,西宮市,花の峯
6620915,兵庫県,西宮市,馬場町
6638154,兵庫県,西宮市,浜甲子園
6620942,兵庫県,西宮市,浜町
6620923,兵庫県,西宮市,浜松原町
6620941,兵庫県,西宮市,浜脇町
6638014,兵庫県,西宮市,林田町
6638132,兵庫県,西宮市,東鳴尾町
6620924,兵庫県,西宮市,東浜町
6620922,兵庫県,西宮市,東町
6691133,兵庫県,西宮市,東山台
6620094,兵庫県,西宮市,毘沙門町
6620084,兵庫県,西宮市,樋之池町
6638011,兵庫県,西宮市,樋ノ口町
6638022,兵庫県,西宮市,日野町
6620835,兵庫県,西宮市,平木町
6620044,兵庫県,西宮市,平松町
6620837,兵庫県,西宮市,広田町
6638203,兵庫県,西宮市,深津町
6620067,兵庫県,西宮市,深谷町
6638031,兵庫県,西宮市,伏原町
6638111,兵庫県,西宮市,二見町
6638185,兵庫県,西宮市,古川町
6620042,兵庫県,西宮市,分銅町
6691112,兵庫県,西宮市,宝生ケ丘
6620072,兵庫県,西宮市,豊楽町
6620953,兵庫県,西宮市,堀切町
6620914,兵庫県,西宮市,本町
6620931,兵庫県,西宮市,前浜町
6620076,兵庫県,西宮市,松生町
6620073,兵庫県,西宮市,松風町
6620061,兵庫県,西宮市,松ケ丘町
6620962,兵庫県,西宮市,松下町
6620053,兵庫県,西宮市,松園町
6638102,兵庫県,西宮市,松並町
6620912,兵庫県,西宮市,松原町
6638101,兵庫県,西宮市,松山町
6620831,兵庫県,西宮市,丸橋町
6620031,兵庫県,西宮市,満池谷町
6620095,兵庫県,西宮市,美作町
6638153,兵庫県,西宮市,南甲子園
6620075,兵庫県,西宮市,南越木岩町
6620834,兵庫県,西宮市,南昭和町
6620976,兵庫県,西宮市,宮西町
6620947,兵庫県,西宮市,宮前町
6638131,兵庫県,西宮市,武庫川町
6620863,兵庫県,西宮市,室川町
6620846,兵庫県,西宮市,森下町
6638013,兵庫県,西宮市,門前町
6620826,兵庫県,西宮市,門戸岡田町
6620824,兵庫県,西宮市,門戸東町
6620828,兵庫県,西宮市,門戸西町
6620825,兵庫県,西宮市,門戸荘
6638024,兵庫県,西宮市,薬師町
6620963,兵庫県,西宮市,屋敷町
6620045,兵庫県,西宮市,安井町
6620866,兵庫県,西宮市,柳本町
6620005,兵庫県,西宮市,湯元町
6620964,兵庫県,西宮市,弓場町
6620921,兵庫県,西宮市,用海町
6620917,兵庫県,西宮市,与古道町
6620841,兵庫県,西宮市,両度町
6620918,兵庫県,西宮市,六湛寺町
6620874,兵庫県,西宮市,六軒町
6638181,兵庫県,西宮市,若草町
6620035,兵庫県,西宮市,若松町
6638016,兵庫県,西宮市,若山町
6620971,兵庫県,西宮市,和上町
6640000,兵庫県,伊丹市,
6640001,兵庫県,伊丹市,荒牧
6640008,兵庫県,伊丹市,荒牧南
6640864,兵庫県,伊丹市,安堂寺町
6640027,兵庫県,伊丹市,池尻
6640846,兵庫県,伊丹市,伊丹
6640861,兵庫県,伊丹市,稲野町
6640011,兵庫県,伊丹市,鋳物師
6640843,兵庫県,伊丹市,岩屋
6640856,兵庫県,伊丹市,梅ノ木
6640899,兵庫県,伊丹市,大鹿
6640003,兵庫県,伊丹市,大野
6640002,兵庫県,伊丹市,荻野
6640031,兵庫県,伊丹市,荻野西
6640025,兵庫県,伊丹市,奥畑
6640833,兵庫県,伊丹市,小阪田
6640863,兵庫県,伊丹市,柏木町
6640893,兵庫県,伊丹市,春日丘
6640831,兵庫県,伊丹市,北伊丹
6640837,兵庫県,伊丹市,北河原
6640891,兵庫県,伊丹市,北園
6640007,兵庫県,伊丹市,北野
6640836,兵庫県,伊丹市,北本町
6640857,兵庫県,伊丹市,行基町
6640844,兵庫県,伊丹市,口酒井
6640872,兵庫県,伊丹市,車塚
6640839,兵庫県,伊丹市,桑津
6640006,兵庫県,伊丹市,鴻池
6640855,兵庫県,伊丹市,御願塚
6640881,兵庫県,伊丹市,昆陽
6640015,兵庫県,伊丹市,昆陽池
6640885,兵庫県,伊丹市,昆陽泉町
6640016,兵庫県,伊丹市,昆陽北
6640886,兵庫県,伊丹市,昆陽東
6640888,兵庫県,伊丹市,昆陽南
6640897,兵庫県,伊丹市,桜ケ丘
6640894,兵庫県,伊丹市,清水
6640832,兵庫県,伊丹市,下河原
6640882,兵庫県,伊丹市,鈴原町
6640898,兵庫県,伊丹市,千僧
6640892,兵庫県,伊丹市,高台
6640851,兵庫県,伊丹市,中央
6640026,兵庫県,伊丹市,寺本
6640020,兵庫県,伊丹市,寺本東
6640022,兵庫県,伊丹市,中野東
6640023,兵庫県,伊丹市,中野西
6640029,兵庫県,伊丹市,中野北
6640838,兵庫県,伊丹市,中村
6640834,兵庫県,伊丹市,西桑津
6640858,兵庫県,伊丹市,西台
6640028,兵庫県,伊丹市,西野
6640873,兵庫県,伊丹市,野間
6640875,兵庫県,伊丹市,野間北
6640845,兵庫県,伊丹市,東有岡
6640835,兵庫県,伊丹市,東桑津
6640004,兵庫県,伊丹市,東野
6640853,兵庫県,伊丹市,平松
6640014,兵庫県,伊丹市,広畑
6640847,兵庫県,伊丹市,藤ノ木
6640896,兵庫県,伊丹市,船原
6640871,兵庫県,伊丹市,堀池
6640024,兵庫県,伊丹市,松ケ丘
6640884,兵庫県,伊丹市,美鈴町
6640017,兵庫県,伊丹市,瑞ケ丘
6640005,兵庫県,伊丹市,瑞原
6640013,兵庫県,伊丹市,瑞穂町
6640012,兵庫県,伊丹市,緑ケ丘
6640883,兵庫県,伊丹市,南鈴原
6640854,兵庫県,伊丹市,南町
6640865,兵庫県,伊丹市,南野
6640887,兵庫県,伊丹市,南野北
6640852,兵庫県,伊丹市,南本町
6640895,兵庫県,伊丹市,宮ノ前
6640842,兵庫県,伊丹市,森本
6640874,兵庫県,伊丹市,山田
6640862,兵庫県,伊丹市,若菱町
6680000,兵庫県,豊岡市,
6680801,兵庫県,豊岡市,赤石
6680001,兵庫県,豊岡市,伊賀谷
6680261,兵庫県,豊岡市,出石町荒木
6680213,兵庫県,豊岡市,出石町伊木
6680207,兵庫県,豊岡市,出石町伊豆
6680218,兵庫県,豊岡市,出石町入佐
6680244,兵庫県,豊岡市,出石町上野
6680215,兵庫県,豊岡市,出石町魚屋
6680214,兵庫県,豊岡市,出石町内町
6680271,兵庫県,豊岡市,出石町大谷
6680201,兵庫県,豊岡市,出石町奥小野
6680251,兵庫県,豊岡市,出石町奥山
6680235,兵庫県,豊岡市,出石町鍛冶屋
6680279,兵庫県,豊岡市,出石町片間
6680255,兵庫県,豊岡市,出石町上村
6680231,兵庫県,豊岡市,出石町川原
6680242,兵庫県,豊岡市,出石町桐野
6680202,兵庫県,豊岡市,出石町口小野
6680264,兵庫県,豊岡市,出石町暮坂
6680256,兵庫県,豊岡市,出石町小人
6680216,兵庫県,豊岡市,出石町材木
6680205,兵庫県,豊岡市,出石町嶋
6680211,兵庫県,豊岡市,出石町下谷
6680233,兵庫県,豊岡市,出石町田結庄
6680209,兵庫県,豊岡市,出石町田多地
6680212,兵庫県,豊岡市,出石町谷山
6680273,兵庫県,豊岡市,出石町坪井
6680257,兵庫県,豊岡市,出石町坪口
6680223,兵庫県,豊岡市,出石町鉄砲
6680241,兵庫県,豊岡市,出石町寺坂
6680222,兵庫県,豊岡市,出石町寺町
6680217,兵庫県,豊岡市,出石町東條
6680272,兵庫県,豊岡市,出石町鳥居
6680275,兵庫県,豊岡市,出石町長砂
6680243,兵庫県,豊岡市,出石町中野
6680254,兵庫県,豊岡市,出石町中村
6680203,兵庫県,豊岡市,出石町袴狭
6680237,兵庫県,豊岡市,出石町馬場
6680246,兵庫県,豊岡市,出石町日野辺
6680266,兵庫県,豊岡市,出石町平田
6680238,兵庫県,豊岡市,出石町弘原
6680206,兵庫県,豊岡市,出石町福居
6680263,兵庫県,豊岡市,出石町福住
6680265,兵庫県,豊岡市,出石町福見
6680262,兵庫県,豊岡市,出石町細見
6680224,兵庫県,豊岡市,出石町本町
6680221,兵庫県,豊岡市,出石町町分
6680236,兵庫県,豊岡市,出石町松枝
6680277,兵庫県,豊岡市,出石町丸中
6680278,兵庫県,豊岡市,出石町三木
6680204,兵庫県,豊岡市,出石町宮内
6680274,兵庫県,豊岡市,出石町水上
6680276,兵庫県,豊岡市,出石町森井
6680225,兵庫県,豊岡市,出石町八木
6680208,兵庫県,豊岡市,出石町安良
6680234,兵庫県,豊岡市,出石町柳
6680245,兵庫県,豊岡市,出石町百合
6680232,兵庫県,豊岡市,出石町宵田
6680253,兵庫県,豊岡市,出石町榎見
6680252,兵庫県,豊岡市,出石町和屋
6680021,兵庫県,豊岡市,泉町
6680821,兵庫県,豊岡市,市場
6680851,兵庫県,豊岡市,今森
6680081,兵庫県,豊岡市,岩井
6680002,兵庫県,豊岡市,岩熊
6680071,兵庫県,豊岡市,内町
6680852,兵庫県,豊岡市,江本
6680041,兵庫県,豊岡市,大磯町
6680861,兵庫県,豊岡市,大篠岡
6680072,兵庫県,豊岡市,大谷
6680031,兵庫県,豊岡市,大手町
6680822,兵庫県,豊岡市,奥野
6696123,兵庫県,豊岡市,小島
6680022,兵庫県,豊岡市,小田井町
6680871,兵庫県,豊岡市,梶原
6680862,兵庫県,豊岡市,香住
6680023,兵庫県,豊岡市,加広町
6680811,兵庫県,豊岡市,鎌田
6680011,兵庫県,豊岡市,上陰
6680061,兵庫県,豊岡市,上佐野
6680863,兵庫県,豊岡市,上鉢山
6680831,兵庫県,豊岡市,神美台
6680841,兵庫県,豊岡市,加陽
6680864,兵庫県,豊岡市,木内
6696103,兵庫県,豊岡市,城崎町今津
6696116,兵庫県,豊岡市,城崎町上山
6696115,兵庫県,豊岡市,城崎町来日
6696111,兵庫県,豊岡市,城崎町楽々浦
6696114,兵庫県,豊岡市,城崎町戸島
6696112,兵庫県,豊岡市,城崎町飯谷
6696113,兵庫県,豊岡市,城崎町結
6696102,兵庫県,豊岡市,城崎町桃島
6696101,兵庫県,豊岡市,城崎町湯島
6680042,兵庫県,豊岡市,京町
6680832,兵庫県,豊岡市,倉見
6696124,兵庫県,豊岡市,気比
6680872,兵庫県,豊岡市,河谷
6680003,兵庫県,豊岡市,江野
6680051,兵庫県,豊岡市,九日市上町
6680053,兵庫県,豊岡市,九日市中町
6680052,兵庫県,豊岡市,九日市下町
6680024,兵庫県,豊岡市,寿町
6680802,兵庫県,豊岡市,金剛寺
6680025,兵庫県,豊岡市,幸町
6680812,兵庫県,豊岡市,栄町
6680043,兵庫県,豊岡市,桜町
6680062,兵庫県,豊岡市,佐野
6680044,兵庫県,豊岡市,山王町
6680054,兵庫県,豊岡市,塩津町
6680012,兵庫県,豊岡市,下陰
6680803,兵庫県,豊岡市,下鶴井
6680813,兵庫県,豊岡市,下宮
6680865,兵庫県,豊岡市,下鉢山
6680082,兵庫県,豊岡市,庄
6680814,兵庫県,豊岡市,祥雲寺
6680873,兵庫県,豊岡市,庄境
6680045,兵庫県,豊岡市,城南町
6680063,兵庫県,豊岡市,正法寺
6680853,兵庫県,豊岡市,清冷寺
6680055,兵庫県,豊岡市,昭和町
6680004,兵庫県,豊岡市,新堂
6696122,兵庫県,豊岡市,瀬戸
6696125,兵庫県,豊岡市,田結
6680064,兵庫県,豊岡市,高屋
6680005,兵庫県,豊岡市,滝
6696214,兵庫県,豊岡市,竹野町阿金谷
6696223,兵庫県,豊岡市,竹野町芦谷
6696202,兵庫県,豊岡市,竹野町宇日
6696352,兵庫県,豊岡市,竹野町大森
6696218,兵庫県,豊岡市,竹野町奥須井
6696224,兵庫県,豊岡市,竹野町鬼神谷
6696333,兵庫県,豊岡市,竹野町御又
6696354,兵庫県,豊岡市,竹野町川南谷
6696334,兵庫県,豊岡市,竹野町河内
6696216,兵庫県,豊岡市,竹野町切浜
6696229,兵庫県,豊岡市,竹野町金原
6696215,兵庫県,豊岡市,竹野町草飼
6696353,兵庫県,豊岡市,竹野町桑野本
6696341,兵庫県,豊岡市,竹野町小城
6696222,兵庫県,豊岡市,竹野町小丸
6696226,兵庫県,豊岡市,竹野町下塚
6696221,兵庫県,豊岡市,竹野町須谷
6696351,兵庫県,豊岡市,竹野町須野谷
6696203,兵庫県,豊岡市,竹野町田久日
6696201,兵庫県,豊岡市,竹野町竹野
6696343,兵庫県,豊岡市,竹野町段
6696225,兵庫県,豊岡市,竹野町轟
6696342,兵庫県,豊岡市,竹野町二連原
6696344,兵庫県,豊岡市,竹野町椒
6696213,兵庫県,豊岡市,竹野町羽入
6696217,兵庫県,豊岡市,竹野町浜須井
6696227,兵庫県,豊岡市,竹野町林
6696228,兵庫県,豊岡市,竹野町東大谷
6696331,兵庫県,豊岡市,竹野町坊岡
6696212,兵庫県,豊岡市,竹野町松本
6696355,兵庫県,豊岡市,竹野町三原
6696332,兵庫県,豊岡市,竹野町森本
6696335,兵庫県,豊岡市,竹野町門谷
6696211,兵庫県,豊岡市,竹野町和田
6680866,兵庫県,豊岡市,駄坂
6680046,兵庫県,豊岡市,立野町
6680833,兵庫県,豊岡市,立石
6680324,兵庫県,豊岡市,但東町相田
6680352,兵庫県,豊岡市,但東町赤花
6680316,兵庫県,豊岡市,但東町天谷
6680374,兵庫県,豊岡市,但東町後
6680372,兵庫県,豊岡市,但東町大河内
6680335,兵庫県,豊岡市,但東町太田
6680353,兵庫県,豊岡市,但東町奥赤
6680343,兵庫県,豊岡市,但東町奥藤
6680301,兵庫県,豊岡市,但東町奥矢根
6680323,兵庫県,豊岡市,但東町小谷
6680321,兵庫県,豊岡市,但東町唐川
6680331,兵庫県,豊岡市,但東町木村
6680341,兵庫県,豊岡市,但東町口藤
6680373,兵庫県,豊岡市,但東町久畑
6680363,兵庫県,豊岡市,但東町栗尾
6680315,兵庫県,豊岡市,但東町河本
6680333,兵庫県,豊岡市,但東町高龍寺
6680376,兵庫県,豊岡市,但東町小坂
6680354,兵庫県,豊岡市,但東町坂津
6680334,兵庫県,豊岡市,但東町坂野
6680325,兵庫県,豊岡市,但東町佐々木
6680364,兵庫県,豊岡市,但東町佐田
6680361,兵庫県,豊岡市,但東町正法寺
6680311,兵庫県,豊岡市,但東町出合
6680313,兵庫県,豊岡市,但東町出合市場
6680337,兵庫県,豊岡市,但東町東里
6680342,兵庫県,豊岡市,但東町中藤
6680345,兵庫県,豊岡市,但東町中山
6680317,兵庫県,豊岡市,但東町西谷
6680332,兵庫県,豊岡市,但東町西野々
6680303,兵庫県,豊岡市,但東町畑
6680351,兵庫県,豊岡市,但東町畑山
6680375,兵庫県,豊岡市,但東町東中
6680314,兵庫県,豊岡市,但東町日殿
6680336,兵庫県,豊岡市,但東町日向
6680362,兵庫県,豊岡市,但東町平田
6680304,兵庫県,豊岡市,但東町水石
6680312,兵庫県,豊岡市,但東町南尾
6680322,兵庫県,豊岡市,但東町三原
6680344,兵庫県,豊岡市,但東町虫生
6680371,兵庫県,豊岡市,但東町薬王寺
6680302,兵庫県,豊岡市,但東町矢根
6680033,兵庫県,豊岡市,中央町
6680032,兵庫県,豊岡市,千代田町
6696121,兵庫県,豊岡市,津居山
6680073,兵庫県,豊岡市,辻
6680083,兵庫県,豊岡市,栃江
6680065,兵庫県,豊岡市,戸牧
6680013,兵庫県,豊岡市,中陰
6680842,兵庫県,豊岡市,中郷
6680874,兵庫県,豊岡市,中谷
6680834,兵庫県,豊岡市,長谷
6680074,兵庫県,豊岡市,野垣
6680804,兵庫県,豊岡市,野上
6680014,兵庫県,豊岡市,野田
6680854,兵庫県,豊岡市,八社宮
6696127,兵庫県,豊岡市,畑上
6680843,兵庫県,豊岡市,引野
6680844,兵庫県,豊岡市,土渕
6695314,兵庫県,豊岡市,日高町赤崎
6695331,兵庫県,豊岡市,日高町上石
6695315,兵庫県,豊岡市,日高町浅倉
6695356,兵庫県,豊岡市,日高町荒川
6695326,兵庫県,豊岡市,日高町池上
6695367,兵庫県,豊岡市,日高町石井
6695375,兵庫県,豊岡市,日高町稲葉
6695357,兵庫県,豊岡市,日高町猪子垣
6695338,兵庫県,豊岡市,日高町猪爪
6695346,兵庫県,豊岡市,日高町伊府
6695302,兵庫県,豊岡市,日高町岩中
6695301,兵庫県,豊岡市,日高町江原
6695339,兵庫県,豊岡市,日高町大岡
6695324,兵庫県,豊岡市,日高町上郷
6695354,兵庫県,豊岡市,日高町観音寺
6695345,兵庫県,豊岡市,日高町久田谷
6695342,兵庫県,豊岡市,日高町久斗
6695372,兵庫県,豊岡市,日高町栗栖野
6695361,兵庫県,豊岡市,日高町栗山
6695341,兵庫県,豊岡市,日高町国分寺
6695366,兵庫県,豊岡市,日高町頃垣
6695351,兵庫県,豊岡市,日高町佐田
6695355,兵庫県,豊岡市,日高町篠垣
6695362,兵庫県,豊岡市,日高町芝
6695365,兵庫県,豊岡市,日高町十戸
6695364,兵庫県,豊岡市,日高町庄境
6695332,兵庫県,豊岡市,日高町竹貫
6695371,兵庫県,豊岡市,日高町太田
6695336,兵庫県,豊岡市,日高町谷
6695352,兵庫県,豊岡市,日高町知見
6695313,兵庫県,豊岡市,日高町鶴岡
6695321,兵庫県,豊岡市,日高町土居
6695343,兵庫県,豊岡市,日高町道場
6695369,兵庫県,豊岡市,日高町栃本
6695358,兵庫県,豊岡市,日高町殿
6695337,兵庫県,豊岡市,日高町中
6695335,兵庫県,豊岡市,日高町奈佐路
6695379,兵庫県,豊岡市,日高町名色
6695344,兵庫県,豊岡市,日高町夏栗
6695328,兵庫県,豊岡市,日高町西芝
6695305,兵庫県,豊岡市,日高町祢布
6695363,兵庫県,豊岡市,日高町野
6695327,兵庫県,豊岡市,日高町野々庄
6695359,兵庫県,豊岡市,日高町羽尻
6695311,兵庫県,豊岡市,日高町日置
6695303,兵庫県,豊岡市,日高町東構
6695373,兵庫県,豊岡市,日高町東河内
6695312,兵庫県,豊岡市,日高町日高
6695322,兵庫県,豊岡市,日高町府市場
6695333,兵庫県,豊岡市,日高町藤井
6695323,兵庫県,豊岡市,日高町府中新
6695325,兵庫県,豊岡市,日高町堀
6695307,兵庫県,豊岡市,日高町松岡
6695376,兵庫県,豊岡市,日高町万劫
6695378,兵庫県,豊岡市,日高町万場
6695306,兵庫県,豊岡市,日高町水上
6695374,兵庫県,豊岡市,日高町水口
6695353,兵庫県,豊岡市,日高町森山
6695377,兵庫県,豊岡市,日高町山田
6695368,兵庫県,豊岡市,日高町山宮
6695334,兵庫県,豊岡市,日高町山本
6695304,兵庫県,豊岡市,日高町宵田
6680015,兵庫県,豊岡市,一日市
6680815,兵庫県,豊岡市,日撫
6680075,兵庫県,豊岡市,福成寺
6680084,兵庫県,豊岡市,福田
6680855,兵庫県,豊岡市,伏
6680076,兵庫県,豊岡市,船谷
6680805,兵庫県,豊岡市,船町
6680816,兵庫県,豊岡市,法花寺
6680047,兵庫県,豊岡市,三坂町
6696126,兵庫県,豊岡市,三原
6680085,兵庫県,豊岡市,宮井
6680823,兵庫県,豊岡市,三宅
6680016,兵庫県,豊岡市,宮島
6680056,兵庫県,豊岡市,妙楽寺
6680077,兵庫県,豊岡市,目坂
6680026,兵庫県,豊岡市,元町
6680806,兵庫県,豊岡市,森
6680824,兵庫県,豊岡市,森尾
6680006,兵庫県,豊岡市,森津
6680057,兵庫県,豊岡市,弥栄町
6680807,兵庫県,豊岡市,山本
6680875,兵庫県,豊岡市,百合地
6680078,兵庫県,豊岡市,吉井
6680817,兵庫県,豊岡市,六地蔵
6680027,兵庫県,豊岡市,若松町
6650000,兵庫県,宝塚市,
6650014,兵庫県,宝塚市,青葉台
6650822,兵庫県,宝塚市,安倉中
6650825,兵庫県,宝塚市,安倉西
6650823,兵庫県,宝塚市,安倉南
6650821,兵庫県,宝塚市,安倉北
6650835,兵庫県,宝塚市,旭町
6650851,兵庫県,宝塚市,泉ガ丘
6650864,兵庫県,宝塚市,泉町
6650033,兵庫県,宝塚市,伊孑志
6650007,兵庫県,宝塚市,伊孑志
6650862,兵庫県,宝塚市,今里町
6650004,兵庫県,宝塚市,梅野町
6691211,兵庫県,宝塚市,大原野
6650057,兵庫県,宝塚市,大吹町
6650034,兵庫県,宝塚市,小林
6650017,兵庫県,宝塚市,小林西山
6650054,兵庫県,宝塚市,鹿塩
6650824,兵庫県,宝塚市,金井町
6691202,兵庫県,宝塚市,上佐曽利
6650047,兵庫県,宝塚市,亀井町
6650842,兵庫県,宝塚市,川面
6650848,兵庫県,宝塚市,川面
6650836,兵庫県,宝塚市,清荒神
6691241,兵庫県,宝塚市,切畑
6660161,兵庫県,宝塚市,切畑
6650808,兵庫県,宝塚市,切畑
6650812,兵庫県,宝塚市,口谷東
6650813,兵庫県,宝塚市,口谷西
6650023,兵庫県,宝塚市,蔵人
6650832,兵庫県,宝塚市,向月町
6691201,兵庫県,宝塚市,香合新田
6650045,兵庫県,宝塚市,光明町
6650041,兵庫県,宝塚市,御所の前町
6650841,兵庫県,宝塚市,御殿山
6650865,兵庫県,宝塚市,寿町
6650827,兵庫県,宝塚市,小浜
6650053,兵庫県,宝塚市,駒の町
6691222,兵庫県,宝塚市,境野
6650845,兵庫県,宝塚市,栄町
6650035,兵庫県,宝塚市,逆瀬川
6650024,兵庫県,宝塚市,逆瀬台
6650846,兵庫県,宝塚市,桜ガ丘
6691205,兵庫県,宝塚市,芝辻新田
6691203,兵庫県,宝塚市,下佐曽利
6650012,兵庫県,宝塚市,寿楽荘
6650052,兵庫県,宝塚市,新明和町
6650044,兵庫県,宝塚市,末成町
6650031,兵庫県,宝塚市,末広町
6650847,兵庫県,宝塚市,すみれガ丘
6650055,兵庫県,宝塚市,大成町
6650051,兵庫県,宝塚市,高司
6650043,兵庫県,宝塚市,高松町
6650076,兵庫県,宝塚市,谷口町
6691231,兵庫県,宝塚市,玉瀬
6650072,兵庫県,宝塚市,千種
6650001,兵庫県,宝塚市,長寿ガ丘
6650002,兵庫県,宝塚市,月見山
6650833,兵庫県,宝塚市,鶴の荘
6650073,兵庫県,宝塚市,塔の町
6650032,兵庫県,宝塚市,東洋町
6650021,兵庫県,宝塚市,中州
6650874,兵庫県,宝塚市,中筋
6650872,兵庫県,宝塚市,中筋
6650875,兵庫県,宝塚市,中筋山手
6650056,兵庫県,宝塚市,中野町
6650877,兵庫県,宝塚市,中山桜台
6650871,兵庫県,宝塚市,中山五月台
6650868,兵庫県,宝塚市,中山荘園
6650876,兵庫県,宝塚市,中山台
6650861,兵庫県,宝塚市,中山寺
6650807,兵庫県,宝塚市,長尾台
6650873,兵庫県,宝塚市,長尾町
6691204,兵庫県,宝塚市,長谷
6650065,兵庫県,宝塚市,仁川旭ガ丘
6650064,兵庫県,宝塚市,仁川うぐいす台
6650062,兵庫県,宝塚市,仁川高台
6650063,兵庫県,宝塚市,仁川高丸
6650066,兵庫県,宝塚市,仁川団地
6650067,兵庫県,宝塚市,仁川月見ガ丘
6650075,兵庫県,宝塚市,仁川宮西町
6650061,兵庫県,宝塚市,仁川北
6650074,兵庫県,宝塚市,仁川台
6650022,兵庫県,宝塚市,野上
6691221,兵庫県,宝塚市,波豆
6650802,兵庫県,宝塚市,花屋敷荘園
6650803,兵庫県,宝塚市,花屋敷つつじガ丘
6650801,兵庫県,宝塚市,花屋敷松ガ丘
6660162,兵庫県,宝塚市,花屋敷緑ガ丘
6650015,兵庫県,宝塚市,光ガ丘
6650805,兵庫県,宝塚市,雲雀丘
6650804,兵庫県,宝塚市,雲雀丘山手
6650816,兵庫県,宝塚市,平井
6650817,兵庫県,宝塚市,平井山荘
6650046,兵庫県,宝塚市,福井町
6650806,兵庫県,宝塚市,ふじガ丘
6650016,兵庫県,宝塚市,宝松苑
6650013,兵庫県,宝塚市,宝梅
6650866,兵庫県,宝塚市,星の荘
6650831,兵庫県,宝塚市,米谷
6650837,兵庫県,宝塚市,米谷清
6650863,兵庫県,宝塚市,三笠町
6650834,兵庫県,宝塚市,美座
6650011,兵庫県,宝塚市,南口
6650811,兵庫県,宝塚市,南ひばりガ丘
6650843,兵庫県,宝塚市,宮の町
6650042,兵庫県,宝塚市,美幸町
6650844,兵庫県,宝塚市,武庫川町
6650005,兵庫県,宝塚市,武庫山
6650852,兵庫県,宝塚市,売布
6650855,兵庫県,宝塚市,売布きよしガ丘
6650856,兵庫県,宝塚市,売布自由ガ丘
6650867,兵庫県,宝塚市,売布東の町
6650854,兵庫県,宝塚市,売布山手町
6650853,兵庫県,宝塚市,売布ガ丘
6650006,兵庫県,宝塚市,紅葉ガ丘
6650071,兵庫県,宝塚市,社町
6650887,兵庫県,宝塚市,山手台東
6650886,兵庫県,宝塚市,山手台西
6650883,兵庫県,宝塚市,山本中
6650814,兵庫県,宝塚市,山本野里
6650815,兵庫県,宝塚市,山本丸橋
6650881,兵庫県,宝塚市,山本東
6650884,兵庫県,宝塚市,山本西
6650882,兵庫県,宝塚市,山本南
6650885,兵庫県,宝塚市,山本台
6650826,兵庫県,宝塚市,弥生町
6650025,兵庫県,宝塚市,ゆずり葉台
6650003,兵庫県,宝塚市,湯本町
6660000,兵庫県,川西市,
6660148,兵庫県,川西市,赤松
6660156,兵庫県,川西市,石道
6660146,兵庫県,川西市,芋生
6660007,兵庫県,川西市,鴬が丘
6660133,兵庫県,川西市,鴬台
6660001,兵庫県,川西市,鴬の森町
6660014,兵庫県,川西市,小戸
6660015,兵庫県,川西市,小花
6660031,兵庫県,川西市,霞ケ丘
6660025,兵庫県,川西市,加茂
6660012,兵庫県,川西市,絹延町
6660135,兵庫県,川西市,錦松台
6660024,兵庫県,川西市,久代
6660103,兵庫県,川西市,国崎
6660101,兵庫県,川西市,黒川
6660107,兵庫県,川西市,下財町
6660145,兵庫県,川西市,けやき坂
6660115,兵庫県,川西市,向陽台
6660033,兵庫県,川西市,栄町
6660021,兵庫県,川西市,栄根
6660104,兵庫県,川西市,笹部
6660022,兵庫県,川西市,下加茂
6660125,兵庫県,川西市,新田
6660116,兵庫県,川西市,水明台
6660158,兵庫県,川西市,清流台
6660142,兵庫県,川西市,清和台東
6660143,兵庫県,川西市,清和台西
6660111,兵庫県,川西市,大和東
6660112,兵庫県,川西市,大和西
6660002,兵庫県,川西市,滝山町
6660126,兵庫県,川西市,多田院
6660127,兵庫県,川西市,多田院多田所町
6660128,兵庫県,川西市,多田院西
6660124,兵庫県,川西市,多田桜木
6660016,兵庫県,川西市,中央町
6660123,兵庫県,川西市,鼓が滝
6660011,兵庫県,川西市,出在家町
6660034,兵庫県,川西市,寺畑
6660113,兵庫県,川西市,長尾町
6660155,兵庫県,川西市,西畦野
6660138,兵庫県,川西市,西多田
6660004,兵庫県,川西市,萩原
6660005,兵庫県,川西市,萩原台東
6660006,兵庫県,川西市,萩原台西
6660134,兵庫県,川西市,萩原台西
6660035,兵庫県,川西市,花屋敷
6660036,兵庫県,川西市,花屋敷山手町
6660017,兵庫県,川西市,火打
6660117,兵庫県,川西市,東畦野
6660114,兵庫県,川西市,東畦野山手
6660023,兵庫県,川西市,東久代
6660122,兵庫県,川西市,東多田
6660032,兵庫県,川西市,日高町
6660153,兵庫県,川西市,一庫
6660121,兵庫県,川西市,平野
6660037,兵庫県,川西市,松が丘町
6660003,兵庫県,川西市,丸の内町
6660152,兵庫県,川西市,丸山台
6650891,兵庫県,川西市,満願寺町
6660013,兵庫県,川西市,美園町
6660157,兵庫県,川西市,緑が丘
6660129,兵庫県,川西市,緑台
6660136,兵庫県,川西市,南野坂
6660026,兵庫県,川西市,南花屋敷
6660105,兵庫県,川西市,見野
6660151,兵庫県,川西市,美山台
6660141,兵庫県,川西市,虫生
6660131,兵庫県,川西市,矢問
6660132,兵庫県,川西市,矢問東町
6660144,兵庫県,川西市,柳谷
6660106,兵庫県,川西市,山下町
6660154,兵庫県,川西市,山原
6660137,兵庫県,川西市,湯山台
6660102,兵庫県,川西市,横路
6660147,兵庫県,川西市,若宮
6691300,兵庫県,三田市,
6691536,兵庫県,三田市,三田市の次に番地がくる場合
6691526,兵庫県,三田市,相生町
6691358,兵庫県,三田市,藍本
6691323,兵庫県,三田市,あかしあ台
6691548,兵庫県,三田市,池尻
6691414,兵庫県,三田市,市之瀬
6691356,兵庫県,三田市,井ノ草
6691336,兵庫県,三田市,馬渡
6691502,兵庫県,三田市,永沢寺
6691528,兵庫県,三田市,駅前町
6691349,兵庫県,三田市,大川瀬
6691354,兵庫県,三田市,大畑
6691515,兵庫県,三田市,大原
6691503,兵庫県,三田市,乙原
6691504,兵庫県,三田市,小野
6691337,兵庫県,三田市,学園
6691507,兵庫県,三田市,香下
6691346,兵庫県,三田市,上相野
6691301,兵庫県,三田市,上青野
6691316,兵庫県,三田市,上井沢
6691338,兵庫県,三田市,上内神
6691415,兵庫県,三田市,上槻瀬
6691542,兵庫県,三田市,上深田
6691351,兵庫県,三田市,上本庄
6691311,兵庫県,三田市,加茂
6691514,兵庫県,三田市,川除
6691403,兵庫県,三田市,川原
6691541,兵庫県,三田市,貴志
6691304,兵庫県,三田市,北浦
6691521,兵庫県,三田市,桑原
6691321,兵庫県,三田市,けやき台
6691412,兵庫県,三田市,木器
6691401,兵庫県,三田市,小柿
6691405,兵庫県,三田市,酒井
6691325,兵庫県,三田市,さくら坂
6691335,兵庫県,三田市,沢谷
6691533,兵庫県,三田市,三田町
6691506,兵庫県,三田市,志手原
6691345,兵庫県,三田市,下相野
6691302,兵庫県,三田市,下青野
6691315,兵庫県,三田市,下井沢
6691333,兵庫県,三田市,下内神
6691416,兵庫県,三田市,下里
6691522,兵庫県,三田市,下田中
6691413,兵庫県,三田市,下槻瀬
6691543,兵庫県,三田市,下深田
6691303,兵庫県,三田市,末
6691402,兵庫県,三田市,末吉
6691417,兵庫県,三田市,鈴鹿
6691322,兵庫県,三田市,すずかけ台
6691352,兵庫県,三田市,須磨田
6691525,兵庫県,三田市,対中町
6691512,兵庫県,三田市,高次
6691406,兵庫県,三田市,田中
6691529,兵庫県,三田市,中央町
6691347,兵庫県,三田市,つつじが丘南
6691348,兵庫県,三田市,つつじが丘北
6691339,兵庫県,三田市,テクノパーク
6691523,兵庫県,三田市,寺村町
6691531,兵庫県,三田市,天神
6691404,兵庫県,三田市,十倉
6691516,兵庫県,三田市,友が丘
6691334,兵庫県,三田市,中内神
6691527,兵庫県,三田市,中町
6691355,兵庫県,三田市,長坂
6691517,兵庫県,三田市,成谷
6691341,兵庫県,三田市,西相野
6691314,兵庫県,三田市,西野上
6691537,兵庫県,三田市,西山
6691505,兵庫県,三田市,尼寺
6691545,兵庫県,三田市,狭間が丘
6691411,兵庫県,三田市,波豆川
6691524,兵庫県,三田市,八景町
6691312,兵庫県,三田市,東野上
6691357,兵庫県,三田市,東本庄
6691353,兵庫県,三田市,東山
6691332,兵庫県,三田市,広沢
6691331,兵庫県,三田市,広野
6691407,兵庫県,三田市,布木
6691313,兵庫県,三田市,福島
6691547,兵庫県,三田市,富士が丘
6691343,兵庫県,三田市,洞
6691344,兵庫県,三田市,溝口
6691535,兵庫県,三田市,南が丘
6691317,兵庫県,三田市,宮脇
6691513,兵庫県,三田市,三輪
6691544,兵庫県,三田市,武庫が丘
6691501,兵庫県,三田市,母子
6691532,兵庫県,三田市,屋敷町
6691511,兵庫県,三田市,山田
6691546,兵庫県,三田市,弥生が丘
6691324,兵庫県,三田市,ゆりのき台
6691534,兵庫県,三田市,横山町
6691342,兵庫県,三田市,四ツ辻
6692300,兵庫県,篠山市,
6692205,兵庫県,篠山市,網掛
6692402,兵庫県,篠山市,県守
6692822,兵庫県,篠山市,明野
6692223,兵庫県,篠山市,味間奥
6692224,兵庫県,篠山市,味間北
6692214,兵庫県,篠山市,味間新
6692222,兵庫県,篠山市,味間南
6692114,兵庫県,篠山市,油井
6692804,兵庫県,篠山市,荒子新田
6692354,兵庫県,篠山市,有居
6692541,兵庫県,篠山市,井串
6692436,兵庫県,篠山市,池上
6692811,兵庫県,篠山市,石住
6692406,兵庫県,篠山市,泉
6692813,兵庫県,篠山市,一印谷
6692613,兵庫県,篠山市,市野々
6692718,兵庫県,篠山市,市山
6692335,兵庫県,篠山市,乾新町
6692105,兵庫県,篠山市,犬飼
6692416,兵庫県,篠山市,井ノ上
6692304,兵庫県,篠山市,今谷
6692357,兵庫県,篠山市,今福
6692454,兵庫県,篠山市,岩崎
6692336,兵庫県,篠山市,魚屋町
6692106,兵庫県,篠山市,牛ケ瀬
6692715,兵庫県,篠山市,打坂
6692455,兵庫県,篠山市,宇土
6692801,兵庫県,篠山市,追入
6692306,兵庫県,篠山市,大上
6692318,兵庫県,篠山市,大熊
6692212,兵庫県,篠山市,大沢
6692211,兵庫県,篠山市,大沢新
6692363,兵庫県,篠山市,大谷
6692355,兵庫県,篠山市,大野
6692611,兵庫県,篠山市,大藤
6692309,兵庫県,篠山市,大渕
6692803,兵庫県,篠山市,大山上
6692823,兵庫県,篠山市,大山下
6692827,兵庫県,篠山市,大山新
6692802,兵庫県,篠山市,大山宮
6692326,兵庫県,篠山市,小川町
6692401,兵庫県,篠山市,奥県守
6692302,兵庫県,篠山市,奥畑
6692501,兵庫県,篠山市,奥原山
6692616,兵庫県,篠山市,奥山
6692614,兵庫県,篠山市,小倉
6692731,兵庫県,篠山市,小坂
6692435,兵庫県,篠山市,小多田
6692532,兵庫県,篠山市,小立
6692704,兵庫県,篠山市,遠方
6692522,兵庫県,篠山市,小野奥谷
6692523,兵庫県,篠山市,小野新
6692604,兵庫県,篠山市,小原
6692554,兵庫県,篠山市,貝田
6692714,兵庫県,篠山市,垣屋
6692408,兵庫県,篠山市,春日江
6692732,兵庫県,篠山市,上板井
6692534,兵庫県,篠山市,上筱見
6692415,兵庫県,篠山市,上宿
6692722,兵庫県,篠山市,河内台
6692724,兵庫県,篠山市,川北
6692725,兵庫県,篠山市,川北新田
6692701,兵庫県,篠山市,川阪
6692735,兵庫県,篠山市,川西
6692512,兵庫県,篠山市,川原
6692325,兵庫県,篠山市,河原町
6692451,兵庫県,篠山市,北
6692317,兵庫県,篠山市,北沢田
6692417,兵庫県,篠山市,北島
6692332,兵庫県,篠山市,北新町
6692824,兵庫県,篠山市,北野
6692825,兵庫県,篠山市,北野新田
6692438,兵庫県,篠山市,京町
6692113,兵庫県,篠山市,草野
6692544,兵庫県,篠山市,草ノ上
6692723,兵庫県,篠山市,口阪本
6692367,兵庫県,篠山市,熊谷
6692407,兵庫県,篠山市,倉谷
6692713,兵庫県,篠山市,倉本
6692711,兵庫県,篠山市,栗柄
6692465,兵庫県,篠山市,栗栖野
6692321,兵庫県,篠山市,黒岡
6692726,兵庫県,篠山市,黒田
6692703,兵庫県,篠山市,桑原
6692341,兵庫県,篠山市,郡家
6692112,兵庫県,篠山市,古森
6692545,兵庫県,篠山市,小田中
6692412,兵庫県,篠山市,小中
6692124,兵庫県,篠山市,不来坂
6692322,兵庫県,篠山市,呉服町
6692461,兵庫県,篠山市,小枕
6692144,兵庫県,篠山市,今田町間新田
6692155,兵庫県,篠山市,今田町芦原新田
6692154,兵庫県,篠山市,今田町市原
6692152,兵庫県,篠山市,今田町荻野分
6692143,兵庫県,篠山市,今田町釜屋
6692132,兵庫県,篠山市,今田町上小野原
6692135,兵庫県,篠山市,今田町上立杭
6692162,兵庫県,篠山市,今田町黒石
6692145,兵庫県,篠山市,今田町木津
6692151,兵庫県,篠山市,今田町今田
6692153,兵庫県,篠山市,今田町今田新田
6692156,兵庫県,篠山市,今田町佐曽良新田
6692161,兵庫県,篠山市,今田町四斗谷
6692133,兵庫県,篠山市,今田町下小野原
6692141,兵庫県,篠山市,今田町下立杭
6692131,兵庫県,篠山市,今田町辰巳
6692142,兵庫県,篠山市,今田町東庄
6692163,兵庫県,篠山市,今田町本荘
6692134,兵庫県,篠山市,今田町休場
6692542,兵庫県,篠山市,細工所
6692712,兵庫県,篠山市,坂本
6692365,兵庫県,篠山市,佐倉
6692405,兵庫県,篠山市,佐貫谷
6692425,兵庫県,篠山市,後川奥
6692422,兵庫県,篠山市,後川上
6692424,兵庫県,篠山市,後川下
6692421,兵庫県,篠山市,後川新田
6692423,兵庫県,篠山市,後川中
6692439,兵庫県,篠山市,渋谷
6692733,兵庫県,篠山市,下板井
6692535,兵庫県,篠山市,下筱見
6692503,兵庫県,篠山市,下原山
6692543,兵庫県,篠山市,塩岡
6692311,兵庫県,篠山市,新荘
6692204,兵庫県,篠山市,杉
6692312,兵庫県,篠山市,菅
6692315,兵庫県,篠山市,筋山
6692125,兵庫県,篠山市,住山
6692231,兵庫県,篠山市,住吉台
6692303,兵庫県,篠山市,瀬利
6692444,兵庫県,篠山市,曽地奥
6692442,兵庫県,篠山市,曽地口
6692443,兵庫県,篠山市,曽地中
6692805,兵庫県,篠山市,園田分
6692812,兵庫県,篠山市,高倉
6692717,兵庫県,篠山市,高坂
6692727,兵庫県,篠山市,高屋
6692612,兵庫県,篠山市,立金
6692323,兵庫県,篠山市,立町
6692453,兵庫県,篠山市,谷山
6692531,兵庫県,篠山市,垂水
6692362,兵庫県,篠山市,知足
6692814,兵庫県,篠山市,長安寺
6692815,兵庫県,篠山市,町ノ田
6692411,兵庫県,篠山市,辻
6692366,兵庫県,篠山市,寺内
6692821,兵庫県,篠山市,東河地
6692111,兵庫県,篠山市,当野
6692826,兵庫県,篠山市,徳永
6692525,兵庫県,篠山市,栃梨
6692434,兵庫県,篠山市,殿町
6692601,兵庫県,篠山市,中
6692213,兵庫県,篠山市,中野
6692502,兵庫県,篠山市,中原山
6692331,兵庫県,篠山市,二階町
6692346,兵庫県,篠山市,西岡屋
6692736,兵庫県,篠山市,西木之部
6692221,兵庫県,篠山市,西古佐
6692728,兵庫県,篠山市,西阪本
6692447,兵庫県,篠山市,西荘
6692334,兵庫県,篠山市,西新町
6692721,兵庫県,篠山市,西谷
6692504,兵庫県,篠山市,西野々
6692352,兵庫県,篠山市,西浜谷
6692206,兵庫県,篠山市,西吹
6692404,兵庫県,篠山市,西本荘
6692342,兵庫県,篠山市,西町
6692433,兵庫県,篠山市,西八上
6692521,兵庫県,篠山市,二之坪
6692353,兵庫県,篠山市,野尻
6692452,兵庫県,篠山市,野中
6692445,兵庫県,篠山市,野々垣
6692313,兵庫県,篠山市,野間
6692716,兵庫県,篠山市,乗竹
6692103,兵庫県,篠山市,波賀野
6692122,兵庫県,篠山市,波賀野新田
6692524,兵庫県,篠山市,箱谷
6692418,兵庫県,篠山市,畑井
6692413,兵庫県,篠山市,畑市
6692505,兵庫県,篠山市,安口
6692553,兵庫県,篠山市,幡路
6692305,兵庫県,篠山市,畑宮
6692101,兵庫県,篠山市,初田
6692307,兵庫県,篠山市,般若寺
6692301,兵庫県,篠山市,火打岩
6692441,兵庫県,篠山市,日置
6692345,兵庫県,篠山市,東岡屋
6692737,兵庫県,篠山市,東木之部
6692201,兵庫県,篠山市,東古佐
6692314,兵庫県,篠山市,東沢田
6692324,兵庫県,篠山市,東新町
6692351,兵庫県,篠山市,東浜谷
6692202,兵庫県,篠山市,東吹
6692403,兵庫県,篠山市,東本荘
6692343,兵庫県,篠山市,風深
6692344,兵庫県,篠山市,吹上
6692203,兵庫県,篠山市,吹新
6692603,兵庫県,篠山市,福井
6692513,兵庫県,篠山市,福住
6692369,兵庫県,篠山市,藤岡奥
6692368,兵庫県,篠山市,藤岡口
6692605,兵庫県,篠山市,藤坂
6692552,兵庫県,篠山市,藤之木
6692123,兵庫県,篠山市,古市
6692702,兵庫県,篠山市,本郷
6692511,兵庫県,篠山市,本明谷
6692316,兵庫県,篠山市,前沢田
6692527,兵庫県,篠山市,松ケ鼻
6692462,兵庫県,篠山市,真南条上
6692464,兵庫県,篠山市,真南条下
6692463,兵庫県,篠山市,真南条中
6692361,兵庫県,篠山市,丸山
6692121,兵庫県,篠山市,見内
6692602,兵庫県,篠山市,三熊
6692333,兵庫県,篠山市,南新町
6692102,兵庫県,篠山市,南矢代
6692615,兵庫県,篠山市,宮代
6692734,兵庫県,篠山市,宮田
6692414,兵庫県,篠山市,宮ノ前
6692526,兵庫県,篠山市,向井
6692437,兵庫県,篠山市,糯ケ坪
6692432,兵庫県,篠山市,八上内
6692446,兵庫県,篠山市,八上上
6692431,兵庫県,篠山市,八上下
6692356,兵庫県,篠山市,矢代
6692104,兵庫県,篠山市,矢代新
6692551,兵庫県,篠山市,安田
6692337,兵庫県,篠山市,山内町
6692533,兵庫県,篠山市,山田
6692364,兵庫県,篠山市,鷲尾
6692308,兵庫県,篠山市,和田
6670000,兵庫県,養父市,
6670115,兵庫県,養父市,上箇
6670103,兵庫県,養父市,浅野
6670104,兵庫県,養父市,伊豆
6670132,兵庫県,養父市,稲津
6670131,兵庫県,養父市,上野
6671119,兵庫県,養父市,鵜縄
6671127,兵庫県,養父市,大久保
6671102,兵庫県,養父市,大谷
6670125,兵庫県,養父市,大塚
6670134,兵庫県,養父市,大坪
6670436,兵庫県,養父市,大屋町明延
6670322,兵庫県,養父市,大屋町筏
6670431,兵庫県,養父市,大屋町糸原
6670301,兵庫県,養父市,大屋町上山
6670303,兵庫県,養父市,大屋町おうみ
6670314,兵庫県,養父市,大屋町大杉
6670311,兵庫県,養父市,大屋町大屋市場
6670313,兵庫県,養父市,大屋町笠谷
6670433,兵庫県,養父市,大屋町門野
6670315,兵庫県,養父市,大屋町加保
6670321,兵庫県,養父市,大屋町蔵垣
6670325,兵庫県,養父市,大屋町栗ノ下
6670434,兵庫県,養父市,大屋町須西
6670302,兵庫県,養父市,大屋町樽見
6670304,兵庫県,養父市,大屋町中
6670305,兵庫県,養父市,大屋町夏梅
6670306,兵庫県,養父市,大屋町宮垣
6670432,兵庫県,養父市,大屋町宮本
6670312,兵庫県,養父市,大屋町山路
6670324,兵庫県,養父市,大屋町横行
6670323,兵庫県,養父市,大屋町若杉
6670435,兵庫県,養父市,大屋町和田
6670111,兵庫県,養父市,大薮
6670121,兵庫県,養父市,奥米地
6671104,兵庫県,養父市,尾崎
6671114,兵庫県,養父市,小路頃
6671115,兵庫県,養父市,葛畑
6670123,兵庫県,養父市,鉄屋米地
6671113,兵庫県,養父市,川原場
6671122,兵庫県,養父市,草出
6670124,兵庫県,養父市,口米地
6670114,兵庫県,養父市,小城
6670105,兵庫県,養父市,左近山
6670102,兵庫県,養父市,十二所
6670107,兵庫県,養父市,新津
6671105,兵庫県,養父市,関宮
6670142,兵庫県,養父市,建屋
6670106,兵庫県,養父市,玉見
6671124,兵庫県,養父市,丹戸
6671112,兵庫県,養父市,出合
6671117,兵庫県,養父市,轟
6671121,兵庫県,養父市,外野
6671116,兵庫県,養父市,中瀬
6670145,兵庫県,養父市,長野
6670122,兵庫県,養父市,中米地
6671123,兵庫県,養父市,梨ケ原
6671125,兵庫県,養父市,奈良尾
6670143,兵庫県,養父市,能座
6670133,兵庫県,養父市,畑
6670101,兵庫県,養父市,広谷
6671126,兵庫県,養父市,福定
6670135,兵庫県,養父市,船谷
6671128,兵庫県,養父市,別宮
6670126,兵庫県,養父市,堀畑
6671103,兵庫県,養父市,万久里
6670136,兵庫県,養父市,三谷
6671101,兵庫県,養父市,三宅
6670144,兵庫県,養父市,餅耕地
6670141,兵庫県,養父市,森
6671118,兵庫県,養父市,安井
6670112,兵庫県,養父市,養父市場
6670113,兵庫県,養父市,薮崎
6670001,兵庫県,養父市,八鹿町青山
6670024,兵庫県,養父市,八鹿町朝倉
6670011,兵庫県,養父市,八鹿町浅間
6670012,兵庫県,養父市,八鹿町伊佐
6670051,兵庫県,養父市,八鹿町石原
6670014,兵庫県,養父市,八鹿町岩崎
6670015,兵庫県,養父市,八鹿町大江
6670053,兵庫県,養父市,八鹿町小佐
6670004,兵庫県,養父市,八鹿町上小田
6670023,兵庫県,養父市,八鹿町上網場
6670044,兵庫県,養父市,八鹿町国木
6670031,兵庫県,養父市,八鹿町九鹿
6670032,兵庫県,養父市,八鹿町小山
6670042,兵庫県,養父市,八鹿町今滝寺
6670013,兵庫県,養父市,八鹿町坂本
6670005,兵庫県,養父市,八鹿町下小田
6670022,兵庫県,養父市,八鹿町下網場
6670003,兵庫県,養父市,八鹿町宿南
6670043,兵庫県,養父市,八鹿町高柳
6670052,兵庫県,養父市,八鹿町日畑
6670016,兵庫県,養父市,八鹿町舞狂
6670002,兵庫県,養父市,八鹿町三谷
6670045,兵庫県,養父市,八鹿町米里
6670041,兵庫県,養父市,八鹿町八木
6670021,兵庫県,養父市,八鹿町八鹿
6671111,兵庫県,養父市,吉井
6693300,兵庫県,丹波市,
6693831,兵庫県,丹波市,青垣町市原
6693827,兵庫県,丹波市,青垣町稲土
6693843,兵庫県,丹波市,青垣町奥塩久
6693812,兵庫県,丹波市,青垣町小倉
6693822,兵庫県,丹波市,青垣町大名草
6693823,兵庫県,丹波市,青垣町大稗
6693841,兵庫県,丹波市,青垣町口塩久
6693802,兵庫県,丹波市,青垣町栗住野
6693824,兵庫県,丹波市,青垣町小稗
6693811,兵庫県,丹波市,青垣町佐治
6693842,兵庫県,丹波市,青垣町沢野
6693825,兵庫県,丹波市,青垣町惣持
6693804,兵庫県,丹波市,青垣町田井縄
6693832,兵庫県,丹波市,青垣町遠阪
6693834,兵庫県,丹波市,青垣町中佐治
6693803,兵庫県,丹波市,青垣町西芦田
6693801,兵庫県,丹波市,青垣町東芦田
6693821,兵庫県,丹波市,青垣町桧倉
6693826,兵庫県,丹波市,青垣町文室
6693833,兵庫県,丹波市,青垣町山垣
6694324,兵庫県,丹波市,市島町市島
6694316,兵庫県,丹波市,市島町岩戸
6694321,兵庫県,丹波市,市島町上垣
6694317,兵庫県,丹波市,市島町上牧
6694336,兵庫県,丹波市,市島町乙河内
6694323,兵庫県,丹波市,市島町梶原
6694344,兵庫県,丹波市,市島町上鴨阪
6694322,兵庫県,丹波市,市島町上田
6694341,兵庫県,丹波市,市島町上竹田
6694315,兵庫県,丹波市,市島町喜多
6694325,兵庫県,丹波市,市島町北岡本
6694312,兵庫県,丹波市,市島町北奥
6694337,兵庫県,丹波市,市島町酒梨
6694343,兵庫県,丹波市,市島町下鴨阪
6694301,兵庫県,丹波市,市島町下竹田
6694313,兵庫県,丹波市,市島町多利
6694332,兵庫県,丹波市,市島町勅使
6694345,兵庫県,丹波市,市島町徳尾
6694333,兵庫県,丹波市,市島町戸坂
6694311,兵庫県,丹波市,市島町戸平
6694302,兵庫県,丹波市,市島町中竹田
6694331,兵庫県,丹波市,市島町東勅使
6694334,兵庫県,丹波市,市島町白毫寺
6694314,兵庫県,丹波市,市島町南
6694342,兵庫県,丹波市,市島町矢代
6694335,兵庫県,丹波市,市島町与戸
6693314,兵庫県,丹波市,柏原町挙田
6693307,兵庫県,丹波市,柏原町石戸
6693315,兵庫県,丹波市,柏原町大新屋
6693309,兵庫県,丹波市,柏原町柏原
6693304,兵庫県,丹波市,柏原町上小倉
6693316,兵庫県,丹波市,柏原町鴨野
6693306,兵庫県,丹波市,柏原町北中
6693313,兵庫県,丹波市,柏原町北山
6693308,兵庫県,丹波市,柏原町小南
6693305,兵庫県,丹波市,柏原町下小倉
6693312,兵庫県,丹波市,柏原町田路
6693302,兵庫県,丹波市,柏原町東奥
6693311,兵庫県,丹波市,柏原町母坪
6693303,兵庫県,丹波市,柏原町見長
6693301,兵庫県,丹波市,柏原町南多田
6693411,兵庫県,丹波市,春日町朝日
6694121,兵庫県,丹波市,春日町池尾
6693413,兵庫県,丹波市,春日町石才
6694135,兵庫県,丹波市,春日町稲塚
6693404,兵庫県,丹波市,春日町牛河内
6693414,兵庫県,丹波市,春日町歌道谷
6694251,兵庫県,丹波市,春日町上三井庄
6694262,兵庫県,丹波市,春日町栢野
6694253,兵庫県,丹波市,春日町鹿場
6694141,兵庫県,丹波市,春日町黒井
6694273,兵庫県,丹波市,春日町国領
6694122,兵庫県,丹波市,春日町小多利
6693415,兵庫県,丹波市,春日町坂
6694252,兵庫県,丹波市,春日町下三井庄
6693402,兵庫県,丹波市,春日町新才
6693412,兵庫県,丹波市,春日町園部
6694125,兵庫県,丹波市,春日町多田
6694274,兵庫県,丹波市,春日町棚原
6694123,兵庫県,丹波市,春日町多利
6693403,兵庫県,丹波市,春日町長王
6694265,兵庫県,丹波市,春日町中山
6694131,兵庫県,丹波市,春日町七日市
6694124,兵庫県,丹波市,春日町野上野
6694261,兵庫県,丹波市,春日町野瀬
6694132,兵庫県,丹波市,春日町野村
6693416,兵庫県,丹波市,春日町野山
6694272,兵庫県,丹波市,春日町東中
6694133,兵庫県,丹波市,春日町平松
6694263,兵庫県,丹波市,春日町広瀬
6694134,兵庫県,丹波市,春日町古河
6694264,兵庫県,丹波市,春日町松森
6693401,兵庫県,丹波市,春日町山田
6694271,兵庫県,丹波市,春日町柚津
6693112,兵庫県,丹波市,山南町青田
6693111,兵庫県,丹波市,山南町阿草
6693125,兵庫県,丹波市,山南町池谷
6693143,兵庫県,丹波市,山南町井原
6693141,兵庫県,丹波市,山南町岩屋
6693158,兵庫県,丹波市,山南町応地
6693123,兵庫県,丹波市,山南町大河
6693113,兵庫県,丹波市,山南町太田
6693124,兵庫県,丹波市,山南町大谷
6693127,兵庫県,丹波市,山南町岡本
6693144,兵庫県,丹波市,山南町奥
6693121,兵庫県,丹波市,山南町奥野々
6693166,兵庫県,丹波市,山南町小野尻
6693167,兵庫県,丹波市,山南町小畑
6693154,兵庫県,丹波市,山南町梶
6693156,兵庫県,丹波市,山南町金倉
6693128,兵庫県,丹波市,山南町金屋
6693101,兵庫県,丹波市,山南町上滝
6693105,兵庫県,丹波市,山南町北太田
6693152,兵庫県,丹波市,山南町北和田
6693148,兵庫県,丹波市,山南町きらら通
6693151,兵庫県,丹波市,山南町草部
6693161,兵庫県,丹波市,山南町五ケ野
6693155,兵庫県,丹波市,山南町小新屋
6693162,兵庫県,丹波市,山南町坂尻
6693103,兵庫県,丹波市,山南町篠場
6693147,兵庫県,丹波市,山南町子茂田
6693102,兵庫県,丹波市,山南町下滝
6693131,兵庫県,丹波市,山南町谷川
6693122,兵庫県,丹波市,山南町玉巻
6693165,兵庫県,丹波市,山南町富田
6693126,兵庫県,丹波市,山南町長野
6693168,兵庫県,丹波市,山南町西谷
6693145,兵庫県,丹波市,山南町野坂
6693104,兵庫県,丹波市,山南町畑内
6693153,兵庫県,丹波市,山南町前川
6693146,兵庫県,丹波市,山南町南中
6693159,兵庫県,丹波市,山南町美和
6693142,兵庫県,丹波市,山南町村森
6693132,兵庫県,丹波市,山南町山崎
6693163,兵庫県,丹波市,山南町山本
6693164,兵庫県,丹波市,山南町若林
6693157,兵庫県,丹波市,山南町和田
6693606,兵庫県,丹波市,氷上町上成松
6693574,兵庫県,丹波市,氷上町朝阪
6693643,兵庫県,丹波市,氷上町伊佐口
6693464,兵庫県,丹波市,氷上町石生
6693461,兵庫県,丹波市,氷上町市辺
6693632,兵庫県,丹波市,氷上町井中
6693466,兵庫県,丹波市,氷上町稲継
6693581,兵庫県,丹波市,氷上町稲畑
6693604,兵庫県,丹波市,氷上町犬岡
6693462,兵庫県,丹波市,氷上町大崎
6693621,兵庫県,丹波市,氷上町大谷
6693612,兵庫県,丹波市,氷上町長野
6693575,兵庫県,丹波市,氷上町小野
6693611,兵庫県,丹波市,氷上町柿柴
6693613,兵庫県,丹波市,氷上町上新庄
6693631,兵庫県,丹波市,氷上町賀茂
6693645,兵庫県,丹波市,氷上町鴨内
6693463,兵庫県,丹波市,氷上町北野
6693653,兵庫県,丹波市,氷上町北油良
6693641,兵庫県,丹波市,氷上町絹山
6693626,兵庫県,丹波市,氷上町清住
6693605,兵庫県,丹波市,氷上町黒田
6693642,兵庫県,丹波市,氷上町香良
6693646,兵庫県,丹波市,氷上町小谷
6693633,兵庫県,丹波市,氷上町御油
6693652,兵庫県,丹波市,氷上町桟敷
6693582,兵庫県,丹波市,氷上町佐野
6693614,兵庫県,丹波市,氷上町下新庄
6693602,兵庫県,丹波市,氷上町常楽
6693571,兵庫県,丹波市,氷上町新郷
6693572,兵庫県,丹波市,氷上町谷村
6693625,兵庫県,丹波市,氷上町中
6693623,兵庫県,丹波市,氷上町中野
6693601,兵庫県,丹波市,氷上町成松
6693603,兵庫県,丹波市,氷上町西中
6693634,兵庫県,丹波市,氷上町沼
6693651,兵庫県,丹波市,氷上町氷上
6693644,兵庫県,丹波市,氷上町日比宇
6693583,兵庫県,丹波市,氷上町福田
6693467,兵庫県,丹波市,氷上町本郷
6693624,兵庫県,丹波市,氷上町三方
6693654,兵庫県,丹波市,氷上町南油良
6693622,兵庫県,丹波市,氷上町三原
6693573,兵庫県,丹波市,氷上町油利
6693465,兵庫県,丹波市,氷上町横田
6695200,兵庫県,朝来市,
6695125,兵庫県,朝来市,山東町粟鹿
6695123,兵庫県,朝来市,山東町一品
6695112,兵庫県,朝来市,山東町大内
6695102,兵庫県,朝来市,山東町大垣
6695153,兵庫県,朝来市,山東町大月
6695142,兵庫県,朝来市,山東町越田
6695143,兵庫県,朝来市,山東町柿坪
6695152,兵庫県,朝来市,山東町楽音寺
6695115,兵庫県,朝来市,山東町金浦
6695133,兵庫県,朝来市,山東町喜多垣
6695151,兵庫県,朝来市,山東町小谷
6695113,兵庫県,朝来市,山東町塩田
6695124,兵庫県,朝来市,山東町柴
6695111,兵庫県,朝来市,山東町新堂
6695101,兵庫県,朝来市,山東町滝田
6695114,兵庫県,朝来市,山東町野間
6695134,兵庫県,朝来市,山東町迫間
6695131,兵庫県,朝来市,山東町柊木
6695104,兵庫県,朝来市,山東町末歳
6695132,兵庫県,朝来市,山東町溝黒
6695141,兵庫県,朝来市,山東町三保
6695135,兵庫県,朝来市,山東町森
6695103,兵庫県,朝来市,山東町矢名瀬町
6695136,兵庫県,朝来市,山東町与布土
6695121,兵庫県,朝来市,山東町和賀
6695122,兵庫県,朝来市,山東町早田
6695221,兵庫県,朝来市,和田山町秋葉台
6695238,兵庫県,朝来市,和田山町朝日
6695234,兵庫県,朝来市,和田山町市場
6695262,兵庫県,朝来市,和田山町市御堂
6695236,兵庫県,朝来市,和田山町内海
6695204,兵庫県,朝来市,和田山町駅北
6695246,兵庫県,朝来市,和田山町岡
6695224,兵庫県,朝来市,和田山町岡田
6695264,兵庫県,朝来市,和田山町加都
6695253,兵庫県,朝来市,和田山町久世田
6695228,兵庫県,朝来市,和田山町久田和
6695263,兵庫県,朝来市,和田山町久留引
6695214,兵庫県,朝来市,和田山町桑原
6695251,兵庫県,朝来市,和田山町栄町
6695256,兵庫県,朝来市,和田山町三波
6695258,兵庫県,朝来市,和田山町城南台
6695220,兵庫県,朝来市,和田山町白井
6695243,兵庫県,朝来市,和田山町高田
6695252,兵庫県,朝来市,和田山町竹田
6695237,兵庫県,朝来市,和田山町竹ノ内
6695233,兵庫県,朝来市,和田山町高生田
6695268,兵庫県,朝来市,和田山町立ノ原
6695213,兵庫県,朝来市,和田山町玉置
6695265,兵庫県,朝来市,和田山町筒江
6695232,兵庫県,朝来市,和田山町寺内
6695203,兵庫県,朝来市,和田山町寺谷
6695255,兵庫県,朝来市,和田山町殿
6695226,兵庫県,朝来市,和田山町中
6695225,兵庫県,朝来市,和田山町野村
6695231,兵庫県,朝来市,和田山町林垣
6695241,兵庫県,朝来市,和田山町土田
6695202,兵庫県,朝来市,和田山町東谷
6695227,兵庫県,朝来市,和田山町東和田
6695266,兵庫県,朝来市,和田山町比治
6695261,兵庫県,朝来市,和田山町枚田
6695215,兵庫県,朝来市,和田山町枚田岡
6695211,兵庫県,朝来市,和田山町平野
6695257,兵庫県,朝来市,和田山町藤和
6695245,兵庫県,朝来市,和田山町法道寺
6695267,兵庫県,朝来市,和田山町法興寺
6695222,兵庫県,朝来市,和田山町万葉台
6695229,兵庫県,朝来市,和田山町宮
6695244,兵庫県,朝来市,和田山町宮内
6695242,兵庫県,朝来市,和田山町宮田
6695223,兵庫県,朝来市,和田山町室尾
6695254,兵庫県,朝来市,和田山町安井
6695212,兵庫県,朝来市,和田山町柳原
6695216,兵庫県,朝来市,和田山町弥生が丘
6695235,兵庫県,朝来市,和田山町和田
6695201,兵庫県,朝来市,和田山町和田山
6660200,兵庫県,川辺郡猪名川町,
6660212,兵庫県,川辺郡猪名川町,旭ケ丘
6660246,兵庫県,川辺郡猪名川町,猪名川台
6660255,兵庫県,川辺郡猪名川町,猪渕
6660244,兵庫県,川辺郡猪名川町,上野
6660241,兵庫県,川辺郡猪名川町,内馬場
6660243,兵庫県,川辺郡猪名川町,柏梨田
6660204,兵庫県,川辺郡猪名川町,柏原
6660202,兵庫県,川辺郡猪名川町,鎌倉
6660231,兵庫県,川辺郡猪名川町,上阿古谷
6660236,兵庫県,川辺郡猪名川町,北田原
6660234,兵庫県,川辺郡猪名川町,北野
6660225,兵庫県,川辺郡猪名川町,木津
6660254,兵庫県,川辺郡猪名川町,肝川
6660256,兵庫県,川辺郡猪名川町,銀山
6660223,兵庫県,川辺郡猪名川町,木間生
6660227,兵庫県,川辺郡猪名川町,笹尾
6660253,兵庫県,川辺郡猪名川町,差組
6660215,兵庫県,川辺郡猪名川町,島
6660214,兵庫県,川辺郡猪名川町,清水
6660213,兵庫県,川辺郡猪名川町,清水東
6660237,兵庫県,川辺郡猪名川町,下阿古谷
6660257,兵庫県,川辺郡猪名川町,白金
6660201,兵庫県,川辺郡猪名川町,杉生
6660238,兵庫県,川辺郡猪名川町,荘苑
6660232,兵庫県,川辺郡猪名川町,民田
6660222,兵庫県,川辺郡猪名川町,槻並
6660245,兵庫県,川辺郡猪名川町,つつじが丘
6660221,兵庫県,川辺郡猪名川町,杤原
6660203,兵庫県,川辺郡猪名川町,西畑
6660211,兵庫県,川辺郡猪名川町,仁頂寺
6660226,兵庫県,川辺郡猪名川町,林田
6660242,兵庫県,川辺郡猪名川町,原
6660252,兵庫県,川辺郡猪名川町,広根
6660262,兵庫県,川辺郡猪名川町,伏見台
6660261,兵庫県,川辺郡猪名川町,松尾台
6660224,兵庫県,川辺郡猪名川町,万善
6660235,兵庫県,川辺郡猪名川町,南田原
6660233,兵庫県,川辺郡猪名川町,紫合
6660251,兵庫県,川辺郡猪名川町,若葉
6696500,兵庫県,美方郡香美町,
6671533,兵庫県,美方郡香美町,小代区秋岡
6671512,兵庫県,美方郡香美町,小代区石寺
6671503,兵庫県,美方郡香美町,小代区大谷
6671542,兵庫県,美方郡香美町,小代区鍛治屋
6671531,兵庫県,美方郡香美町,小代区茅野
6671511,兵庫県,美方郡香美町,小代区神水
6671514,兵庫県,美方郡香美町,小代区神場
6671501,兵庫県,美方郡香美町,小代区久須部
6671522,兵庫県,美方郡香美町,小代区実山
6671541,兵庫県,美方郡香美町,小代区佐坊
6671502,兵庫県,美方郡香美町,小代区城山
6671543,兵庫県,美方郡香美町,小代区忠宮
6671532,兵庫県,美方郡香美町,小代区新屋
6671544,兵庫県,美方郡香美町,小代区貫田
6671521,兵庫県,美方郡香美町,小代区野間谷
6671545,兵庫県,美方郡香美町,小代区東垣
6671523,兵庫県,美方郡香美町,小代区平野
6671513,兵庫県,美方郡香美町,小代区広井
6671515,兵庫県,美方郡香美町,小代区水間
6696404,兵庫県,美方郡香美町,香住区相谷
6696432,兵庫県,美方郡香美町,香住区上計
6696671,兵庫県,美方郡香美町,香住区余部
6696431,兵庫県,美方郡香美町,香住区浦上
6696425,兵庫県,美方郡香美町,香住区大梶
6696554,兵庫県,美方郡香美町,香住区大谷
6696555,兵庫県,美方郡香美町,香住区大野
6696433,兵庫県,美方郡香美町,香住区沖浦
6696552,兵庫県,美方郡香美町,香住区加鹿野
6696544,兵庫県,美方郡香美町,香住区香住
6696414,兵庫県,美方郡香美町,香住区上岡
6696411,兵庫県,美方郡香美町,香住区九斗
6696402,兵庫県,美方郡香美町,香住区訓谷
6696559,兵庫県,美方郡香美町,香住区小原
6696541,兵庫県,美方郡香美町,香住区境
6696413,兵庫県,美方郡香美町,香住区下岡
6696564,兵庫県,美方郡香美町,香住区下浜
6696551,兵庫県,美方郡香美町,香住区守柄
6696556,兵庫県,美方郡香美町,香住区中野
6696546,兵庫県,美方郡香美町,香住区七日市
6696416,兵庫県,美方郡香美町,香住区丹生地
6696415,兵庫県,美方郡香美町,香住区西下岡
6696423,兵庫県,美方郡香美町,香住区畑
6696422,兵庫県,美方郡香美町,香住区土生
6696424,兵庫県,美方郡香美町,香住区隼人
6696542,兵庫県,美方郡香美町,香住区一日市
6696557,兵庫県,美方郡香美町,香住区藤
6696561,兵庫県,美方郡香美町,香住区間室
6696426,兵庫県,美方郡香美町,香住区三川
6696553,兵庫県,美方郡香美町,香住区三谷
6696401,兵庫県,美方郡香美町,香住区無南垣
6696412,兵庫県,美方郡香美町,香住区米地
6696421,兵庫県,美方郡香美町,香住区本見塚
6696545,兵庫県,美方郡香美町,香住区森
6696403,兵庫県,美方郡香美町,香住区安木
6696563,兵庫県,美方郡香美町,香住区矢田
6696558,兵庫県,美方郡香美町,香住区八原
6696562,兵庫県,美方郡香美町,香住区油良
6696672,兵庫県,美方郡香美町,香住区鎧
6696543,兵庫県,美方郡香美町,香住区若松
6671312,兵庫県,美方郡香美町,村岡区相田
6671346,兵庫県,美方郡香美町,村岡区池ケ平
6671315,兵庫県,美方郡香美町,村岡区板仕野
6671324,兵庫県,美方郡香美町,村岡区市原
6671368,兵庫県,美方郡香美町,村岡区入江
6671344,兵庫県,美方郡香美町,村岡区大笹
6671321,兵庫県,美方郡香美町,村岡区大糠
6671333,兵庫県,美方郡香美町,村岡区大野
6671323,兵庫県,美方郡香美町,村岡区耀山
6671366,兵庫県,美方郡香美町,村岡区川会
6671313,兵庫県,美方郡香美町,村岡区神坂
6671342,兵庫県,美方郡香美町,村岡区口大谷
6671353,兵庫県,美方郡香美町,村岡区熊波
6671335,兵庫県,美方郡香美町,村岡区黒田
6671354,兵庫県,美方郡香美町,村岡区柤岡
6671326,兵庫県,美方郡香美町,村岡区光陽
6671303,兵庫県,美方郡香美町,村岡区小城
6671301,兵庫県,美方郡香美町,村岡区境
6671316,兵庫県,美方郡香美町,村岡区鹿田
6671325,兵庫県,美方郡香美町,村岡区高井
6671345,兵庫県,美方郡香美町,村岡区高坂
6671365,兵庫県,美方郡香美町,村岡区高津
6671331,兵庫県,美方郡香美町,村岡区作山
6671322,兵庫県,美方郡香美町,村岡区寺河内
6671352,兵庫県,美方郡香美町,村岡区長板
6671343,兵庫県,美方郡香美町,村岡区中大谷
6671364,兵庫県,美方郡香美町,村岡区長須
6671361,兵庫県,美方郡香美町,村岡区長瀬
6671314,兵庫県,美方郡香美町,村岡区萩山
6671362,兵庫県,美方郡香美町,村岡区原
6671337,兵庫県,美方郡香美町,村岡区日影
6671334,兵庫県,美方郡香美町,村岡区福岡
6671351,兵庫県,美方郡香美町,村岡区丸味
6671363,兵庫県,美方郡香美町,村岡区味取
6671311,兵庫県,美方郡香美町,村岡区村岡
6671341,兵庫県,美方郡香美町,村岡区森脇
6671332,兵庫県,美方郡香美町,村岡区八井谷
6671336,兵庫県,美方郡香美町,村岡区宿
6671302,兵庫県,美方郡香美町,村岡区山田
6671317,兵庫県,美方郡香美町,村岡区用野
6671304,兵庫県,美方郡香美町,村岡区和佐父
6671367,兵庫県,美方郡香美町,村岡区和田
6671347,兵庫県,美方郡香美町,村岡区和池
6696700,兵庫県,美方郡新温泉町,
6696714,兵庫県,美方郡新温泉町,赤崎
6696701,兵庫県,美方郡新温泉町,芦屋
6696832,兵庫県,美方郡新温泉町,飯野
6696751,兵庫県,美方郡新温泉町,居組
6696953,兵庫県,美方郡新温泉町,石橋
6696805,兵庫県,美方郡新温泉町,伊角
6696801,兵庫県,美方郡新温泉町,井土
6696803,兵庫県,美方郡新温泉町,今岡
6696808,兵庫県,美方郡新温泉町,歌長
6696945,兵庫県,美方郡新温泉町,内山
6696952,兵庫県,美方郡新温泉町,海上
6696811,兵庫県,美方郡新温泉町,多子
6696946,兵庫県,美方郡新温泉町,越坂
6696802,兵庫県,美方郡新温泉町,金屋
6696942,兵庫県,美方郡新温泉町,鐘尾
6696752,兵庫県,美方郡新温泉町,釜屋
6696954,兵庫県,美方郡新温泉町,岸田
6696711,兵庫県,美方郡新温泉町,清富
6696815,兵庫県,美方郡新温泉町,桐岡
6696812,兵庫県,美方郡新温泉町,切畑
6696721,兵庫県,美方郡新温泉町,久谷
6696727,兵庫県,美方郡新温泉町,久斗山
6696804,兵庫県,美方郡新温泉町,熊谷
6696726,兵庫県,美方郡新温泉町,境
6696712,兵庫県,美方郡新温泉町,指杭
6696833,兵庫県,美方郡新温泉町,塩山
6696741,兵庫県,美方郡新温泉町,七釜
6696723,兵庫県,美方郡新温泉町,正法庵
6696742,兵庫県,美方郡新温泉町,新市
6696713,兵庫県,美方郡新温泉町,田井
6696728,兵庫県,美方郡新温泉町,対田
6696722,兵庫県,美方郡新温泉町,高末
6696761,兵庫県,美方郡新温泉町,竹田
6696831,兵庫県,美方郡新温泉町,竹田
6696813,兵庫県,美方郡新温泉町,丹土
6696943,兵庫県,美方郡新温泉町,千谷
6696941,兵庫県,美方郡新温泉町,千原
6696745,兵庫県,美方郡新温泉町,栃谷
6696814,兵庫県,美方郡新温泉町,中辻
6696702,兵庫県,美方郡新温泉町,浜坂
6696807,兵庫県,美方郡新温泉町,春来
6696806,兵庫県,美方郡新温泉町,桧尾
6696732,兵庫県,美方郡新温泉町,福富
6696725,兵庫県,美方郡新温泉町,藤尾
6696731,兵庫県,美方郡新温泉町,二日市
6696743,兵庫県,美方郡新温泉町,古市
6696746,兵庫県,美方郡新温泉町,戸田
6696724,兵庫県,美方郡新温泉町,辺地
6696822,兵庫県,美方郡新温泉町,細田
6696951,兵庫県,美方郡新温泉町,前
6696715,兵庫県,美方郡新温泉町,三尾
6696747,兵庫県,美方郡新温泉町,三谷
6696944,兵庫県,美方郡新温泉町,宮脇
6696753,兵庫県,美方郡新温泉町,諸寄
6696821,兵庫県,美方郡新温泉町,湯
6696744,兵庫県,美方郡新温泉町,用土
6696716,兵庫県,美方郡新温泉町,和田
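
The postal-code records above follow a fixed four-column layout: postal code, prefecture, city or county, and town (the town field is empty for prefix records such as 6693300,兵庫県,丹波市,). A minimal sketch of reading such a file with the standard library, assuming the data is saved as hyogo_postal.csv (a hypothetical filename):

import csv

# Each row: postal_code, prefecture, city, town (town may be empty
# for county/city-level prefix records).
with open('hyogo_postal.csv', encoding='utf-8') as f:
    for postal_code, prefecture, city, town in csv.reader(f):
        print(postal_code, prefecture, city, town)
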
import FWCore.ParameterSet.Config as cms

# Run over all events in the input files (-1 means no limit).
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )

# Primary and secondary (parent) input file lists; filled in below.
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source("PoolSource", fileNames = readFiles, secondaryFileNames = secFiles)
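# A minimal usage sketch (assumption: the standard CMSSW pattern; the
# process name "Demo" is hypothetical) showing how this source block is
# typically attached to a cms.Process in a full configuration:
#
#   process = cms.Process("Demo")
#   process.source = source
#   process.maxEvents = maxEvents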
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/002F2CE1-38BB-E611-AF9F-0242AC130005.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/009CE684-45BB-E611-A261-001E67E6F8FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/044FF9CC-42BB-E611-ACB0-0CC47AD98BC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/06103109-48BB-E611-86BE-001E673968A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0843C79F-FCBD-E611-B38C-001E67A3F8A8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0881BCD8-8FBE-E611-8796-002590FD5A72.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08E524F3-0ABC-E611-984F-141877639F59.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/08F5FD50-23BC-E611-A4C2-00259073E3DA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0A85AA82-45BB-E611-8ACD-001E674FB063.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CA050B2-57BB-E611-8A7A-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/0CC4E5F0-8EBE-E611-81A0-FA163E0546A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10104CB2-51BB-E611-BCDC-FA163E2D421C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10B1C835-51BB-E611-962E-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/10F3C0E6-BDBD-E611-B15C-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/144E9DCA-3ABD-E611-B140-0025905B85EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/147E1208-0EBC-E611-8AB4-20CF307C9897.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16409928-3FBB-E611-B72C-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16602767-48BB-E611-B7A6-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16BBACA8-FBBD-E611-BEC0-FA163E72410F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/16D0A23B-0EBD-E611-A7D4-00266CFF090C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/184B6801-D9BC-E611-8E6A-00259073E52C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/1C48C9F3-58BB-E611-95E5-FA163E897AAE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/203922CF-19BD-E611-A4CB-002590D0AF54.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/245AB734-3DBB-E611-A2BE-0090FAA575B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/260BAF20-F9BD-E611-AB0D-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/269219CA-42BB-E611-9B4D-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2A5DBC04-3CBB-E611-9C43-0CC47AA99436.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/2C22DEF7-8EBE-E611-9D17-0025905A497A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3048BFB6-48BD-E611-B2AE-FA163E7B239E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3232416A-4CBB-E611-9301-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3665009B-FFBD-E611-9358-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/38F82D20-42BB-E611-AA3B-002590747E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3C994553-4DBB-E611-829E-00259048BF92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3CB1F166-3EBB-E611-BBAC-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/3E96493A-F2BD-E611-B4D4-24BE05C6E561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44E3620A-31BB-E611-B8EE-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/44F1F2FC-3BBB-E611-9596-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/48723720-30BC-E611-906D-0025905B855C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/4C6573B5-F8BC-E611-8B6D-0CC47A7C340E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/5A85E548-3FBB-E611-9AF8-001E674FCAE9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6064DD7D-27BC-E611-9269-FA163E3A554D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6079A2C9-5CBB-E611-9D23-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/64BFBA66-2ABC-E611-9884-02163E013C92.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/68FD6C45-4EBB-E611-8CE3-0CC47A7452D8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6AAB2667-DFBC-E611-BCE9-44A842CFCA0D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6C7AF5E2-51BB-E611-944C-0025905A60B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6CB42E13-4ABB-E611-B37A-B083FECFF6AB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E2754F6-49BB-E611-A8B6-00259074AE8A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E4F2DD7-3FBB-E611-A5F6-0CC47A13CD44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E7B7470-FEBD-E611-9FD6-0CC47A78A446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/6E8A3BE5-4CBB-E611-A86D-00259073E4E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/70F436E2-3DBB-E611-92D6-0CC47A546E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7240B6CA-D6BC-E611-8854-B083FED04276.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74368CB5-42BB-E611-B3D9-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/74390FDB-35BD-E611-932E-02163E013EF0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/76C36A06-48BB-E611-984D-0090FAA58204.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/78EB0B24-02BE-E611-B6ED-FA163E275D07.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7A70E7A0-52BB-E611-A35E-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AA6AA1E-30BC-E611-8E7E-0025905A610A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7AF0E4F9-4EBB-E611-9B9B-0CC47A13D284.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CE9BE91-0EBC-E611-A5DA-180373FF8446.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7CF35EDF-E8BC-E611-A47E-24BE05C488E1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/7EDEC297-2EBE-E611-857F-0242AC130003.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/8044F84B-44BB-E611-8915-001E674440E2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/864E3740-E6BC-E611-AD01-0CC47AD98BC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88A401C7-48BB-E611-A057-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/88FE7E84-17BC-E611-B83A-001EC94BF93F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/90410DC3-0EBC-E611-AAC2-001E675A6AA9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9488E5A5-4ABB-E611-8F1A-0025905A60AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/94F362D7-5DBB-E611-AB61-FA163E508270.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/98E58784-40BB-E611-ACF5-0CC47AD98D0C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9A25B43C-3DBB-E611-917E-001E674FB149.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/9E2603C9-4DBB-E611-A64D-001E674FBA1D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A09103A1-3ABB-E611-9459-001E6745764D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A29BDC40-47BB-E611-93D2-B083FED42A1A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A416944E-41BB-E611-9753-0CC47AD99144.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A86280DE-51BB-E611-B051-0025905A6118.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/A8BEF298-12BD-E611-90EE-E41D2D08DE30.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC1C716C-51BB-E611-BA14-0025907D1D6C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC289CA3-4CBB-E611-83E8-001EC94BF6CA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/AC9BCBDD-19BC-E611-9B23-002590791DA2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B0050E8E-32BB-E611-B390-0025900E3508.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B08DA7E7-43BB-E611-993E-002590E2F5CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B64CF6DF-3CBB-E611-BB5A-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B6B969CA-4EBB-E611-AF09-FA163EC9E920.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/B82DE45D-4DBB-E611-88CE-0025905B85FE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BC7EA562-4BBB-E611-BE25-0025901D08B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/BEDB4181-97BE-E611-A59F-001E675A67BB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0306F0A-8FBE-E611-9828-0025905A6132.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C0A1AF5B-44BB-E611-BD4B-0CC47AA992B4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/C8F2E99E-3BBB-E611-9324-0090FAA59864.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CA4A3834-2BBB-E611-834E-90B11C2CA3F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CC245041-E2BC-E611-8171-00266CFCCB44.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/CEDA35B7-15BC-E611-B1AD-001E67E6F819.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D21EA92D-07BC-E611-BD74-70106F4A93E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D453B2C6-8FBE-E611-9644-141877411FCD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D4E99AD0-46BB-E611-BB98-001E67348055.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/D62C98A8-08BC-E611-A9AC-B8CA3A70A410.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/DA92E40D-48BB-E611-B782-0025905A612E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E2B687E9-3DBB-E611-943D-0025907253B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E2E24C18-5ABB-E611-A1C2-FA163E0546A6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/E8411037-15BE-E611-A222-002590D9D8B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/EAEF3807-51BB-E611-92CA-001E674440E2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/EC9F5605-4CBB-E611-B15D-002590A88800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/F463E2E0-4ABB-E611-ADEE-001E6745764D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/120000/F8CFD45A-4BBB-E611-9F9E-001E67A40523.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/06C6936D-7EBC-E611-B990-0025905A60B4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/0C858DC7-3CBD-E611-AD67-0242AC130002.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1A6F76CE-3BBD-E611-83FD-FA163E02238B.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1C9D4463-20BE-E611-85E9-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/1CEA7D79-34BD-E611-8312-24BE05C33C81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/2274F4B0-90BE-E611-833A-001E67DDC88A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/2E44EBA0-38BD-E611-B455-FA163E66032D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/34003670-98BD-E611-BB5A-001E67586A2F.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/3AB85B19-57BD-E611-B55E-24BE05C6C741.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/3CD657F2-06BE-E611-8DDA-00259075D72E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/4A993269-E8BE-E611-ADF4-FA163E0EFB0F.root',
       '/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/560F7D9C-7FBE-E611-BFD8-D067E5F910F5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/5C89D593-7EBD-E611-A808-0CC47A4D7654.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/6CA4DF87-5FBD-E611-9B94-A4BADB1E6B36.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/6CA9275C-C0BD-E611-B377-0025905B8566.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/7200B868-E8BE-E611-A7F9-FA163E702259.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/74104143-88BD-E611-86FF-0CC47A6C0716.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/746D6C2B-86BD-E611-84C8-001E67A42A71.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/7AD42ECF-E8BE-E611-8E74-B083FECFF2BF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8A977455-4BBD-E611-A087-0025905B8606.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8E66F610-E9BE-E611-A5DC-0CC47A57CCEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/8EA28415-7CBC-E611-BD99-B083FED02AD1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/908E6B68-5DBD-E611-BB8F-02163E014939.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/927E9406-82BC-E611-B9D4-0CC47A4C8EBA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/92AD2277-E8BE-E611-A1A7-0CC47A4C8E64.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/9AC5A158-82BD-E611-81A7-0025905B856E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/9C342D41-0BBE-E611-B0AF-001E67E95A40.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/A6D46533-83BC-E611-951A-0025907D2502.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B0545BAA-BDBD-E611-B355-0CC47A4C8EEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B0DB98A8-45BE-E611-8171-B083FED4239C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/B871CA8E-5EBD-E611-8C0F-003048FFD79E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/BAA9F89F-E8BE-E611-A262-0CC47AD98F6A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/CAEC4CA1-0EBE-E611-B31F-02163E00C8AF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/D6D2C6BC-9ABE-E611-A0FB-0CC47A745294.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/DCC635A5-77BD-E611-9430-14187740D279.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/E6D0DEEE-3EBD-E611-B8C3-002590E3A004.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/E6FE08F6-48BD-E611-A296-B083FED40671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/F4C2F65E-39BD-E611-8630-E0DB550BA718.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/50000/FABC8385-E8BE-E611-B290-0025905AA9CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/04AA3519-78BB-E611-BE3C-001E67E69879.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/04F715A6-08BC-E611-B9C2-001E67457A5D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06B5DD90-8BBC-E611-A7F0-008CFA0527CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06CABBDA-3EBD-E611-9E1F-02163E00B58A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/06D9FFFF-1CBC-E611-8BAB-001E67DFFB86.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/0C744224-61BB-E611-9685-001E67E6F8AF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1019A18E-4EBB-E611-9A28-FA163E6CC06D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1023F365-48BB-E611-A2DC-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/10759B01-49BB-E611-BD64-00259048812E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/12723348-74BC-E611-9153-008CFA1980B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1E13B576-8FBC-E611-A831-6C3BE5B50170.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1E17922C-59BB-E611-806D-0CC47A78A3EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/1EE8E7C8-B6BD-E611-9244-001E67586629.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2041BDC3-51BB-E611-B061-24BE05C44BB1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/24919263-BFBB-E611-9813-B8CA3A70BAC8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2610F510-B1BB-E611-AE92-00259073E4E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2616102B-5DBB-E611-86E7-02163E012D2E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/262EC509-5CBB-E611-9D05-002590E7DE20.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/26D730F4-5CBB-E611-9F20-0025901AC3F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2A624B00-61BB-E611-83FC-E0CB4E1277DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2AEC73E7-B7BB-E611-AC2E-549F3525DB98.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2C8ABFE0-E5BC-E611-BD07-848F69FD29DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2CC5B687-5EBB-E611-AB4D-0CC47A13D110.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/2E2BDEC8-47BB-E611-9247-0025905A60B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/30AE7047-57BB-E611-854C-002590494FA8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/343F470A-5BBB-E611-A1F7-0025905B85F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/365648D1-54BD-E611-B6F7-008CFA0A5818.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/36591B5B-05BC-E611-8151-0CC47A57D036.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/36FE3A3A-5DBB-E611-8566-002590494FA8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3C24D130-4FBB-E611-8B0F-0CC47AD98F64.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E266B78-5ABB-E611-A814-001E674FCAE9.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E5CC689-5DBB-E611-8C42-0025905A6066.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/3E8A1C73-8ABC-E611-9B5E-24BE05CEEB81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/44DB4315-57BB-E611-995E-90B11C2CA3F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/469B9982-5EBB-E611-9769-001E67444EAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/46B827C3-98BD-E611-83DD-001E67E6F7F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/480070FD-B9BB-E611-AD45-002590E3A0D4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4840F306-6BBB-E611-BA80-FA163E71673B.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/487D6062-51BB-E611-886C-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4A7F614C-1BBC-E611-A31E-3417EBE6453D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4A858527-ADBB-E611-B14A-0025904AC2CC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4C6F5203-55BB-E611-AAF7-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4CDB60CF-54BB-E611-844C-001E675A6630.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4E7ADF82-AEBB-E611-80A0-001E67A40514.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/4EADF3B2-4BBB-E611-851C-24BE05CE3C91.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/500C9FDC-51BB-E611-BCF8-F04DA2752F68.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/54C17AF5-58BB-E611-A76C-0CC47A7C34EE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/54DFEB5B-4FBB-E611-93D4-1866DAEEB344.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5634EE46-C9BB-E611-A3BE-20CF305B04D2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5AD25019-57BB-E611-90E6-1418774118F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/5EA1A28D-4ABB-E611-ADD9-001E67E6F774.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/604C2313-C4BB-E611-8A16-02163E01308C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/60F0052B-58BB-E611-A737-0CC47AA98F9A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/62372BCA-54BB-E611-86FB-001E67457E7C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/626AEFCB-56BB-E611-99BB-002590E7E00A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/62E2233B-4FBB-E611-A733-0025905AA9F0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/68E69FBA-4EBB-E611-AAB6-24BE05CE3C91.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6A6B6985-BABB-E611-A8E2-0CC47A4D765A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6AE8E49B-47BB-E611-BB27-24BE05CECB81.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C2DAC6D-53BB-E611-AF58-5065F381F1C1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C657BC3-8BBC-E611-8748-0CC47A4D76D2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6C77BB1A-69BD-E611-9B17-047D7BD6DD56.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/6EAB5B6E-55BB-E611-A77D-00259073E34C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/70B16FDD-C8BC-E611-91E7-001E67E71BFF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/721640BC-59BB-E611-B026-842B2B1814E3.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/72B03B93-72BD-E611-9645-00259021A39E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/760C07CF-A8BB-E611-B4AE-FA163E3C6237.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/765D3877-90BD-E611-B86C-0025905B85DE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/76F7FCAB-94BC-E611-AA4D-00259073E30E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/78405767-51BD-E611-AB62-141877639F59.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7A883500-BABB-E611-A0C2-001E67DDBEDA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7CE0E1E0-55BB-E611-853C-D4AE527EDBD4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/7EC34512-C0BD-E611-A475-002590D9D88C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/80616104-5BBB-E611-B899-0CC47A4C8E5E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8208268F-4EBB-E611-A943-24BE05C68671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8273D41F-63BB-E611-A1C4-02163E00ADA2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/82805D06-55BB-E611-864B-02163E012D2E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/82935CEA-D0BC-E611-AE9E-B083FED02AD1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/86B5F6E7-B4BB-E611-A22E-FA163EC14324.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/885AED4A-50BB-E611-B113-E0CB4E1277DF.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/88A64B17-57BB-E611-BFCA-001E673986B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/88C17D9F-63BB-E611-AF7F-0CC47A537688.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8A5F6492-62BB-E611-AC86-002590494E18.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8CAA98C4-59BB-E611-871D-002590E7D7CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/8E4600AE-5BBB-E611-9ABC-0CC47AD98F70.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/92233F29-C5BB-E611-AC46-20CF3027A613.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/98A6F3C0-54BB-E611-BA22-1418774126FB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/A4E9EDC3-F1BD-E611-8E1D-0025901D0C52.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/A6A1CA79-57BB-E611-8C5C-0025907D250C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/B098D6CC-95BD-E611-8126-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/B450D260-51BB-E611-8EBA-FA163E4625E4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/BA4BF56A-86BC-E611-B038-0CC47A4D7628.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/BA5BFCCA-74BB-E611-9BC4-FA163E0D029A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C07F1E43-47BB-E611-89F3-0025908653C4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C4DF8DFA-5CBB-E611-ABE3-002590E3A222.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C6621154-64BB-E611-AA61-FA163E5E91D1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C845816C-9FBD-E611-BF21-001E674FBFC2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/C86658F5-57BB-E611-B6C0-0CC47A4C8E14.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CA8946A8-0EBC-E611-A208-0CC47A7E6BDE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CAF439BC-51BB-E611-B2C6-001E673972F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/CEE671C4-83BC-E611-A0FA-0CC47AD98C86.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D256DBB1-5FBB-E611-ADDF-0025905B8580.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D29F6AD8-4CBB-E611-B0FA-24BE05C68671.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D2DDB5C9-45BB-E611-AE58-0CC47AD98C88.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D2E83075-55BB-E611-9525-0025905B85F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D6940CC7-54BB-E611-B0F4-0CC47A13D110.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/D8F8A24A-57BB-E611-82B6-FA163EE1F3FE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/DA5A8C92-58BB-E611-9661-001E674FC800.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E03CEBF1-57BB-E611-BFC4-001E67E69E32.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2032456-D8BB-E611-98B6-001E6779242E.root',
] )
readFiles.extend( [
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2EDC4CA-51BB-E611-A622-782BCB20E959.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E2FC9494-CFBC-E611-AE9C-00304867FDFB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E40BBD77-5ABB-E611-89F6-0025907253B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/E8DE1702-7BBB-E611-A982-FA163EBEC103.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EADBF61A-A7BB-E611-B3DB-5065F382C221.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EC5A4AC2-BCBB-E611-A28C-001E674820F0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EC7674EF-B7BB-E611-95FC-0025905A606A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/EEAEC58B-03BC-E611-83F4-B083FED42FB0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F2689A6D-25BC-E611-B763-0CC47ABAC11C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F2B33A97-4DBB-E611-83CD-A0369F30FFD2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F4182A72-62BB-E611-B7AA-0025905A60B0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F47230CE-46BB-E611-B3CA-0025905B8560.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F63829CB-BABB-E611-AEA5-002590D9D8AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/F8663E21-61BB-E611-BE1E-FA163EB7FE16.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FA9974E9-5ABB-E611-A277-02163E011591.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FCD03ECE-54BB-E611-A054-002590D9D976.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/60000/FE93D59C-6BBB-E611-A870-FA163E832397.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0049958C-41BB-E611-9FFD-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/021F1221-9CBD-E611-B1C8-0CC47A4D75F8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0A9D4A81-E3BB-E611-AF73-B083FED42FC4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0ABF0BC9-40BB-E611-B8A9-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0C04ACF9-D6BC-E611-900D-3417EBE64561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/0C6EFA8D-12BC-E611-84DB-047D7BD6DEC4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/142169C1-4CBB-E611-94B6-0025901D08E8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/16A11D58-FABD-E611-A9A2-0019B9CABE16.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/22125190-07BE-E611-BC7C-0025905B85B8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/227D0461-C4BC-E611-9876-0025905B8560.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/264B8DB8-0DBC-E611-AE25-24BE05C62711.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/2C23BF13-44BB-E611-BB17-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/2E3653CF-CCBE-E611-B469-0CC47A4D76AA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/30CF49E8-4DBB-E611-959E-90B11C2AA430.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/30E73B14-40BB-E611-ABAD-5065F3818291.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3644C9A5-4ABB-E611-8834-0025905A612A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3C3E54EF-C7BE-E611-9C19-1866DAEA6520.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/3CBAF338-B0BC-E611-A51A-001C23C0A63C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/42310A70-02BE-E611-A992-0025905A4964.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/42498B58-F0BC-E611-B2BE-0CC47A13CBEA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/4256CB52-FDBD-E611-938D-782BCB539695.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/462DFAB6-50BB-E611-BF53-002590D9D8A4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/46F6CE79-E7BE-E611-AE4C-D4AE52AAF583.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/483DC42D-FBBD-E611-AFA9-02163E00C3B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/48670377-C0BE-E611-A5A8-FA163E6A92FB.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/4AD91F7E-01BE-E611-BAB7-0025901F8740.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/52570313-0FBC-E611-A1B3-20CF307C98F1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/52FAD6C9-F8BD-E611-B9B1-0025904A91F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5476EB9C-03BC-E611-BD70-7845C4FC3B48.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5A0C37F4-37BC-E611-8385-02163E013DF6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/5EBF73F5-C7BE-E611-9F4D-002590E3A0FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/60A9F704-42BB-E611-B922-0CC47A13D416.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6234FF81-E8BC-E611-9298-5065F382A241.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/62500CF2-D6BC-E611-BF36-6CC2173BB830.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6269E13B-27BC-E611-9298-0CC47A78A3D8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/64620C4D-7BBD-E611-89D8-002590D9D8B6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/66EEABF3-49BB-E611-A0CD-B083FED42A6E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/6ABE11F4-45BC-E611-8DEC-0242AC130003.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7018CEDA-3EBB-E611-AEA6-24BE05BDCEF1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/723CD3A0-66BB-E611-81EF-FA163E89941A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/724216E2-45BB-E611-82A0-0CC47A4C8E2A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7476C568-B8BE-E611-986C-24BE05C63721.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/74A358CD-E6BB-E611-8ABC-0025905A612E.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7A69DDA2-6BBB-E611-ACD7-FA163E9B46B5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/7E732764-CCBE-E611-A5F4-0025901AC0FA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/82A199BB-51BB-E611-83DD-0025905A6090.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/84E70035-3DBB-E611-BDD5-24BE05C48801.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/86E7D7D9-3CBB-E611-99C4-24BE05C656A1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/8A416855-4BBB-E611-B3AE-0CC47AD98F72.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/90EF4B08-C1BE-E611-9781-001E674FB149.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/94250B23-1EBC-E611-B3F7-002590D9D896.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/9AE25501-FEBD-E611-AC08-0025904A91F6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A215AB70-ECBB-E611-9643-0CC47A78A45A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A6638BCC-3BBB-E611-9BA0-24BE05C44BC1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A6FE78CC-E8BC-E611-83B6-001E67E71BE1.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/A8E416C8-54BB-E611-9E1F-001E67397DF5.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/AAFDCF92-1EBC-E611-AA68-00259073E3DA.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/AC116782-8ABD-E611-B894-001E674DA1AD.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B249E9C0-23BC-E611-A811-001E67A4061D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B458B32A-58BB-E611-923C-001E674FB24D.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/B699C90F-C6BE-E611-91A1-02163E0176B7.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/BA00965D-44BB-E611-B0B8-90B11C27F8B2.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/C20853C9-4CBB-E611-9206-0025905A60D6.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/C607932E-F7BD-E611-9737-141877410522.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D0ECFAFA-CABC-E611-861A-B083FED12B5C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D49903B9-0DBC-E611-8CED-001E67E6F92C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/D898D6EE-52BB-E611-8FA5-0CC47A13CDB0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/DC35E165-24BD-E611-9422-00259073E34A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/E04BDCDF-50BB-E611-BE70-0025905A6104.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/E0F9A1D7-3ABB-E611-90F6-24BE05CEBD61.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/EE420CAF-42BB-E611-AC1A-A0000420FE80.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/EEB1A329-49BB-E611-BA64-0CC47A0AD3BC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F0F3A9FC-20BC-E611-BAE8-00238BCE45A0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F41F5D98-48BB-E611-A02A-00259048B754.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F646B19D-4DBB-E611-AC98-002590D9D8A4.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/70000/F8EBBFBF-48BB-E611-A6CD-001E67A3FEAC.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/00BEFF95-8BBF-E611-BCC4-001E67A3FDF8.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/040ACF1F-1BBE-E611-AE01-B083FED42ED0.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/0430EC9F-30BD-E611-A35D-001E677927CE.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/04FE23CC-96BB-E611-833A-24BE05C6E561.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/08CBBABB-1FBD-E611-BF0B-0025907DCA4A.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/0C1D3A78-E6BE-E611-B2C9-0CC47AA98B8C.root',
'/store/mc/RunIISummer16MiniAODv2/WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/PUMoriond17_80X_mcRun2_asymptotic_2016_TrancheIV_v6-v1/80000/22186CA1-1CBD-E6