blob_id: d786e89b9d478dcff3c541c89731247075d078c3 | language: Python
'''
@author: Ken Venner
@contact: [email protected]
@version: 1.13
Read in a file of wine names and create consistent wine descriptions
from these names.
'''
import kvutil
import kvcsv
import re
import sys
import shutil
# may comment out in the future
import pprint
pp = pprint.PrettyPrinter(indent=4)
ppFlag = False
# application variables
optiondictconfig = {
'AppVersion' : {
'value' : '1.13',
'description' : 'defines the version number for the app',
},
'debug' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we are running in debug mode',
},
'verbose' : {
'value' : 1,
'type' : 'int',
'description' : 'defines the display level for print messages',
},
'setup_check' : {
'value' : False,
'type' : 'bool',
        'description' : 'defines if we are checking out the setup',
},
'pprint' : {
'value' : False,
'type' : 'bool',
'description' : 'defines if we output with pretty print when debugging',
},
'csvfile_master_in' : {
'value' : 'wine_xref.csv',
'description' : 'defines the name of the master data input file',
},
'csvfile_update_in' : {
'value' : 'wineref.csv',
        'description' : 'defines the name of the input file to be updated',
},
'csvfile_update_out' : {
'value' : 'wineref2.csv',
'description' : 'defines the name of the updated output file',
},
'fldWine' : {
'value' : 'wine',
'description' : 'defines the name of the field that holds the Wine ',
},
'fldWineDescr' : {
'value' : 'winedescr',
'description' : 'defines the name of the field holding the wine description',
},
'fldWineDescrNew' : {
'value' : 'winedescrnew',
'description' : 'defines the name of the NEW field holding the new description ',
},
'fldWineDescrMatch' : {
'value' : None,
        'description' : 'defines the name of the NEW field holding the result of comparing the existing description to the new one',
},
'fldWineMaster' : {
'value' : None,
'description' : 'defines the name of the field that holds the Wine when reading the master file ',
},
'fldWineDescrMaster' : {
'value' : None,
'description' : 'defines the name of the field holding the wine description when reading the master file',
},
'backupfile_ext' : {
'value' : '.bak',
        'description' : 'defines the extension used to back up the update input file when we replace it with the output',
},
'defaultnew' : {
'value' : None,
        'description' : 'defines a default value to assign to field fldWineDescrNew when it is not already set',
},
}
### GLOBAL VARIABLES / LOOKUPS ########################################
# regex search for vintage in wine name
vintageLookup = (
    re.compile(r'\d\d\d\d\s+\d\d(\d\d)'), # two years together - get this one over early
    re.compile(r'^\d\d(\d\d)'), # four position start of line
    re.compile(r'\s\d\d(\d\d)$'), # four position end of line
    re.compile(r'\s\d\d(\d\d)\s'), # four position middle of line
    re.compile(r'XX\d\d(\d\d)\s'), # four position preceded by XX
    re.compile(r'\s\d\d(\d\d)\/'), # four position before a slash
    re.compile(r'\s\'?(\d\d)\'?$|\s\'?(\d\d)\'?\s'), # two position date with optional apostrophe front or back
)
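# Illustrative sketch (not part of the original script): the patterns above are
# meant to be tried in order, and each capture group holds the last two digits
# of the vintage year.  A hypothetical driver might look like this:
def example_find_vintage(wine_name):
    '''Return the two-digit vintage captured by the first matching pattern, or None'''
    for pattern in vintageLookup:
        match = pattern.search(wine_name)
        if match:
            # the two-position pattern has two alternative groups - return whichever hit
            for grp in match.groups():
                if grp:
                    return grp
    return None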
# regex search for case in wine name
reCase = re.compile(r'12\s*X\s*750\s*ML|\bcase\b|12\/750\s*ML',re.IGNORECASE)
# regex to pick up qualifiers from the wine
reQualLookup = (
    (None, re.compile(r'\bWithout\s+Gift\b|\bNo\s+Gift', re.IGNORECASE)), # the non-gift patterns - match them first
('Gift', re.compile(r'\bGift\b', re.IGNORECASE)),
('VAP', re.compile(r'\bVAP\b', re.IGNORECASE)),
('VAP', re.compile(r'\bGlassVAP\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlass\b', re.IGNORECASE)),
('Glass', re.compile(r'\bGlasses\b', re.IGNORECASE)),
('Etch', re.compile(r'\bEtch\b', re.IGNORECASE)),
('Basket', re.compile(r'\bBasket\b', re.IGNORECASE)),
)
# regex search to define the size of the wine bottle
sizeLookup = (
('1.75L', re.compile(r'\b1\.75\s*Li?|\b1\.75$', re.IGNORECASE)),
('1.5L', re.compile(r'\b1\.5\s*L?\b|\bMagnum\b', re.IGNORECASE)),
('375mL', re.compile(r'Half\s+Bottle|375ml', re.IGNORECASE)),
('200mL', re.compile(r'\b200\s*ML|\(200\s*ML', re.IGNORECASE)),
('50mL', re.compile(r'\b50\s*ML|\(50\s*ML', re.IGNORECASE)),
('500mL', re.compile(r'\b500\s*ML|\(500\s*ML', re.IGNORECASE)),
('3L', re.compile(r'\b3\s*Li?', re.IGNORECASE)),
('6L', re.compile(r'\b6\s*Li?', re.IGNORECASE)),
('9L', re.compile(r'\b9\s*Li?', re.IGNORECASE)),
('1L', re.compile(r'\b1L\b|\b1\s+L$|\b1.0\s*L\b|\b1\s+Liter\b|\bOne\s+Liter\b|\bLITER\b|\b1\s*LTR', re.IGNORECASE)),
)
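# Illustrative sketch (not part of the original script): sizeLookup, like the other
# (label, regex) tuples in this module, is scanned in order and the first match wins,
# which is why more specific entries are listed before more general ones.
def example_find_size(wine_name):
    '''Return the size label from the first matching pattern, or None
    (a caller might then assume a standard 750 mL bottle)'''
    for label, pattern in sizeLookup:
        if pattern.search(wine_name):
            return label
    return None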
# regex extract winery names from the wine field
wineryLookup = (
('Alban', re.compile(r'\bAlban\b', re.IGNORECASE)),
('Arrowood', re.compile(r'\bArrowood\b', re.IGNORECASE)),
('Atalon', re.compile(r'\bAtalon\b', re.IGNORECASE)),
('Attune', re.compile(r'\bAttune\b', re.IGNORECASE)),
('Auteur', re.compile(r'\bAuteur\b', re.IGNORECASE)),
('Austin Hope', re.compile(r'\bAustin\s+Hope\b', re.IGNORECASE)),
('Badge', re.compile(r'\bBadge\b', re.IGNORECASE)),
('Balletto', re.compile(r'\bBalletto\b', re.IGNORECASE)),
('Bell', re.compile(r'\bBell\s+Cellar', re.IGNORECASE)),
('BR Cohn', re.compile(r'\bB\.?\s?R\.?\s+Cohn\b', re.IGNORECASE)),
('Bremer', re.compile(r'\bBremer\b', re.IGNORECASE)),
('Brewer-Clifton', re.compile(r'\bBrewer[\s\-]Clifton\b', re.IGNORECASE)),
('BV', re.compile(r'\bBeaulieu\s+V|\bBV\b', re.IGNORECASE)),
('Belle Glos', re.compile(r'\bBelle\s+Glos\b', re.IGNORECASE)),
('Bennett Ln', re.compile(r'\bBennet+\sLane\b', re.IGNORECASE)),
('Benovia', re.compile(r'\bBenovia\b', re.IGNORECASE)),
('Beringer', re.compile(r'\bBeringer\b', re.IGNORECASE)),
('Blackstone', re.compile(r'\bBlackstone\b', re.IGNORECASE)),
('Brancott', re.compile(r'\bBrancott\b', re.IGNORECASE)),
('Cade', re.compile(r'\bCade\b', re.IGNORECASE)),
('Cain Five', re.compile(r'\bCain\s+Five\b|\bCain\s-\sFive\b|\bCain\s5\b|\bCainFive\b', re.IGNORECASE)),
('Cakebread', re.compile(r'\bCakebread\b', re.IGNORECASE)),
('Cardinale', re.compile(r'\bCardinale\b', re.IGNORECASE)),
('Caymus', re.compile(r'\bCaymus\b', re.IGNORECASE)),
('Chappellet', re.compile(r'\bChappellet\b', re.IGNORECASE)),
('Chalk Hill', re.compile(r'\bChalk\s+Hill\b', re.IGNORECASE)),
('Clos Du Bois', re.compile(r'\bClos\s+Du\s+Bois\b', re.IGNORECASE)),
('ClosDuVal', re.compile(r'\bClos\s+du\s+Val\b', re.IGNORECASE)),
('Colgin', re.compile(r'\bColgin\b', re.IGNORECASE)),
('Concha Don Melchor', re.compile(r'\bConcha\s.*Don\s+Melchor\b|Don\s+Melchor\b', re.IGNORECASE)),
('Continuum', re.compile(r'\bContinuum\b', re.IGNORECASE)),
('Corison', re.compile(r'\bCorison\b', re.IGNORECASE)),
('Cristal', re.compile(r'Roederer\s?.*Cristal\b|\bCristal\b.+Brut', re.IGNORECASE)),
('Curran', re.compile(r'\bCurran\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bDarioush\b', re.IGNORECASE)),
('Darioush', re.compile(r'\bCaravan\b', re.IGNORECASE)),
('David Arthur', re.compile(r'\bDavid\s+Arthur\b', re.IGNORECASE)),
('David Bruce', re.compile(r'\bDavid\s+Bruce\b', re.IGNORECASE)),
('Davis Family', re.compile(r'\bDavis\s+Family\b', re.IGNORECASE)),
('Del Dotto', re.compile(r'\bDel\s+Dotto\b', re.IGNORECASE)),
('Dominus', re.compile(r'\bDominus\b', re.IGNORECASE)),
('Goldeneye', re.compile(r'\bGoldeneye\b', re.IGNORECASE)), # before duckhorn
('Paraduxx', re.compile(r'\bParaduxx\b', re.IGNORECASE)), # before duckhorn
('Domaine Carneros', re.compile(r'\bDomaine\s+Carneros\b', re.IGNORECASE)),
    ('Dominus', re.compile(r'\bDominus\b', re.IGNORECASE)),
('Drappier', re.compile(r'\bDrappier\b', re.IGNORECASE)),
('Duckhorn', re.compile(r'\bDuckhorn\b', re.IGNORECASE)),
('Dumol', re.compile(r'\bDumol\b', re.IGNORECASE)),
('Dunn', re.compile(r'\bDunn\b', re.IGNORECASE)),
('Ehlers', re.compile(r'\bEhlers\b', re.IGNORECASE)),
('Etude', re.compile(r'\bEtude\b', re.IGNORECASE)),
('Far Niente', re.compile(r'\bFar Niente\b', re.IGNORECASE)),
('Flora', re.compile(r'\bFlora\s+Springs\b', re.IGNORECASE)),
('Flowers', re.compile(r'\bFlowers\b', re.IGNORECASE)),
('Robert Foley', re.compile(r'\bRobert\s+\bFoley\b', re.IGNORECASE)), #before Foley
('Foley', re.compile(r'\bFoley\b', re.IGNORECASE)),
('Foxen', re.compile(r'\bFoxen\b', re.IGNORECASE)),
('Franciscan', re.compile(r'\bFranciscan\b', re.IGNORECASE)),
('Frank Family', re.compile(r'\bFrank Family\b', re.IGNORECASE)),
('Gary Farrell', re.compile(r'\bGary\s+Farrel+\b', re.IGNORECASE)),
('Ghost Block', re.compile(r'\bGhost\s+Block\b', re.IGNORECASE)),
('Grgich', re.compile(r'\bGrgich\b', re.IGNORECASE)),
('Groth', re.compile(r'\bGroth\b', re.IGNORECASE)),
('Gundlach', re.compile(r'\bGundlach\b', re.IGNORECASE)),
('Hansel', re.compile(r'\bHansel\b', re.IGNORECASE)),
('Hanzell', re.compile(r'\bHanzell\b', re.IGNORECASE)),
('Hess', re.compile(r'\bHess\b', re.IGNORECASE)),
('Hewitt', re.compile(r'\bHewitt\b', re.IGNORECASE)),
('Hobbs', re.compile(r'\bHobbs\b|\bcrossbarn\b', re.IGNORECASE)),
('Hundred Acre', re.compile(r'\bHundred\s+Acre\b', re.IGNORECASE)),
('Jordan', re.compile(r'\bJordan\b', re.IGNORECASE)),
('Justin', re.compile(r'\bJustin\b', re.IGNORECASE)),
('Kim Crawford', re.compile(r'\bKim\s+Crawford\b', re.IGNORECASE)),
('Kistler', re.compile(r'\bKistler\b', re.IGNORECASE)),
('Kosta', re.compile(r'\bKosta\s+Browne?\b', re.IGNORECASE)),
('Krug', re.compile(r'\bKrug\b', re.IGNORECASE)),
('Kunde', re.compile(r'\bKunde\b', re.IGNORECASE)),
('LaCrema', re.compile(r'\bLa\s?Crema\b', re.IGNORECASE)),
('Lewis', re.compile(r'\bLewis\b', re.IGNORECASE)),
('Lokoya', re.compile(r'\bLokoya\b', re.IGNORECASE)),
('Meiomi', re.compile(r'\bMeiomi\b', re.IGNORECASE)),
('Melville', re.compile(r'\bMelville\b', re.IGNORECASE)),
('Momento Mori', re.compile(r'\bMomento\s+Mori\b', re.IGNORECASE)),
('Mondavi', re.compile(r'\bMondavi\b', re.IGNORECASE)),
('Montelena', re.compile(r'\bMontelena\b', re.IGNORECASE)),
('Mt Veeder', re.compile(r'^Mount\s+Veeder\b|^Mt\.? Veeder\b|\d+\s+M[^t]*t\s+Veeder\b', re.IGNORECASE)),
('Newton', re.compile(r'\bNewton\b', re.IGNORECASE)),
('Nickel', re.compile(r'\bNickel\b', re.IGNORECASE)),
('Opus One', re.compile(r'\bOpus\s+One\b', re.IGNORECASE)),
('P Togni', re.compile(r'\bTogni\b', re.IGNORECASE)),
('Pahlmeyer Jayson', re.compile(r'\bJayson\b', re.IGNORECASE)), # this before pahlmeyer
('Pahlmeyer', re.compile(r'\bPahlmeyer\b(?!\s*Jay)', re.IGNORECASE)),
('Papillon', re.compile(r'\bPapillon\b', re.IGNORECASE)),
('Patz', re.compile(r'\bPatz\b', re.IGNORECASE)),
('Phelps', re.compile(r'\bPhelps\b', re.IGNORECASE)),
('Plumpjack', re.compile(r'\bPlumpjack\b', re.IGNORECASE)),
('Pride', re.compile(r'\bPride\b', re.IGNORECASE)),
('Prisoner', re.compile(r'\bPrisoner\b', re.IGNORECASE)),
('Provenance', re.compile(r'\bProvenance\b', re.IGNORECASE)),
('R Sinskey', re.compile(r'\bSinskey\b', re.IGNORECASE)),
('Ramey', re.compile(r'\bRamey\b', re.IGNORECASE)),
('Revana', re.compile(r'\bRevana\b', re.IGNORECASE)),
('Raptor', re.compile(r'\bRaptor\s+Ridge\b', re.IGNORECASE)),
('Ridge', re.compile(r'\bRidge\b', re.IGNORECASE)),
('Robert Foley', re.compile(r'\bRobert\s+Foley\b', re.IGNORECASE)),
('Rombauer', re.compile(r'\bRombauer\b', re.IGNORECASE)),
('Rudd', re.compile(r'\bRudd\b', re.IGNORECASE)),
('Scarecrow', re.compile(r'\bScarecrow\b', re.IGNORECASE)),
('Sea Smoke', re.compile(r'\bSea\s+Smoke\b', re.IGNORECASE)),
('Seghesio', re.compile(r'\bSeghesio\b', re.IGNORECASE)),
('Shafer', re.compile(r'\bShafer\b', re.IGNORECASE)),
('Sherwin', re.compile(r'\bSherwin\b', re.IGNORECASE)),
('Silver Oak', re.compile(r'\bSilver\s+Oak\b', re.IGNORECASE)),
('Silverado', re.compile(r'\bSilverado\b', re.IGNORECASE)),
('Simi', re.compile(r'\bSimi\b', re.IGNORECASE)),
('Sonoma Cutrer', re.compile(r'\bCutrer\b', re.IGNORECASE)),
('Spottswoode', re.compile(r'\bSpottswoode\b', re.IGNORECASE)),
('Stag Leap', re.compile(r'\bStag.*\sLeap\b', re.IGNORECASE)),
('Sullivan', re.compile(r'\bSullivan\b', re.IGNORECASE)),
('Summerland', re.compile(r'\bSummerland\b', re.IGNORECASE)),
('Summers', re.compile(r'\bSummers\b', re.IGNORECASE)),
('Tantara', re.compile(r'\bTantara\b', re.IGNORECASE)),
('Turnbull', re.compile(r'\bTurnbull\b', re.IGNORECASE)),
('Veuve', re.compile(r'\bVeuve\b', re.IGNORECASE)),
('Viader', re.compile(r'\bViader\b', re.IGNORECASE)),
('Waterstone', re.compile(r'\bWaterstone\b', re.IGNORECASE)),
('Whitehall', re.compile(r'\bWhitehall\b', re.IGNORECASE)),
('Wm Selyem', re.compile(r'\bWilliams\s*\-?Selyem\b', re.IGNORECASE)),
('ZD', re.compile(r'\bZD\b', re.IGNORECASE)),
('Zaca', re.compile(r'\bZaca\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Reserve\b', re.IGNORECASE)),
('zBourbon Woodford Res', re.compile(r'\bWoodford\s+Rsv\b', re.IGNORECASE)),
('zCognac Courvoisier', re.compile(r'\bCourvoisier\b', re.IGNORECASE)),
('zCognac Hennessy', re.compile(r'\bHennesse?y\b', re.IGNORECASE)),
('zCognac Remy', re.compile(r'\bRemy\s+Martin\b|\bRemy\s+Louis', re.IGNORECASE)),
('zCointreau', re.compile(r'\bCointreau\b', re.IGNORECASE)),
('zGin Hendrick', re.compile(r'\bHendrick', re.IGNORECASE)),
('zGin Tanqueray', re.compile(r'\bTanqueray\b', re.IGNORECASE)),
('zRum Mt Gay', re.compile(r'\bMount\s+Gay\b|\bMt\s+Gay', re.IGNORECASE)),
('zRum Ron Zacapa', re.compile(r'\bRon\s+Zacapa\b', re.IGNORECASE)),
('zRye Hayden', re.compile(r'\bBasil\s+Hayden\b', re.IGNORECASE)),
('zSambuca', re.compile(r'\bSambuca\b', re.IGNORECASE)),
('zScotch Glenmorangie', re.compile(r'\bGlenmorangie\b', re.IGNORECASE)),
('zScotch Hibiki Harmony', re.compile(r'\bHibiki\s.*Harmony\b', re.IGNORECASE)),
('zScotch Hibiki', re.compile(r'\bHibiki\b(?!\s*Har)', re.IGNORECASE)),
('zScotch Macallan', re.compile(r'\bMacallan\b', re.IGNORECASE)),
('zTeq Campo Azul', re.compile(r'\bCampo\s+Azul\b', re.IGNORECASE)),
('zTeq Casamigos', re.compile(r'\bCasamigos\b', re.IGNORECASE)),
('zTeq Casino Azul', re.compile(r'\bCasino\s+Azul\b', re.IGNORECASE)),
('zTeq Clase Azul', re.compile(r'\bClase\s+Azul\b', re.IGNORECASE)),
('zTeq Cuervo', re.compile(r'\bJose\s+Cuervo\b|^Cuervo\b', re.IGNORECASE)),
('zTeq Don Julio', re.compile(r'\bDon\s+Julio\b', re.IGNORECASE)),
    ('zTeq Dos Artes', re.compile(r'\bDos\s+Artes\b', re.IGNORECASE)),
('zTeq Gran Cava', re.compile(r'\bGran\s+Cava\b', re.IGNORECASE)),
('zTeq Herradura', re.compile(r'\bHerradura\b', re.IGNORECASE)),
('zTeq Loma Azul', re.compile(r'\bLoma\s+Azul\b', re.IGNORECASE)),
('zTeq Padre Azul', re.compile(r'\bPadre\s+Azul\b', re.IGNORECASE)),
('zTeq Partida', re.compile(r'\bPartida\b', re.IGNORECASE)),
('zTeq Patron', re.compile(r'\bPatron\b', re.IGNORECASE)),
('zTripleSec Gr Marnier', re.compile(r'\bGrand\s+Marnier\b', re.IGNORECASE)),
('zTripleSec Dekuyper', re.compile(r'\bDekuyper\b', re.IGNORECASE)),
('zTripleSec Hiram', re.compile(r'\bHiram\b', re.IGNORECASE)),
('zVodka Absolut', re.compile(r'\bAbsolut\b', re.IGNORECASE)),
('zVodka Skyy', re.compile(r'\bSkyy\b', re.IGNORECASE)),
('zVodka Tito', re.compile(r'\bTito', re.IGNORECASE)),
('zWhiskey Balvenie', re.compile(r'\bBalvenie\b', re.IGNORECASE)),
('zWhiskey J Walker', re.compile(r'\bJohn+ie\s+Walker\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
)
# regex extract the grape from the wine field
grapeLookup = (
('Cab Franc', re.compile(r'\bCabernet\s+Franc|\bCab\s+Franc', re.IGNORECASE)), # before cab
('Cab', re.compile(r'\bCabernet\b|\sCS\s|\sCS$|\bCab\b', re.IGNORECASE)),
('Claret', re.compile(r'\bClaret\b', re.IGNORECASE)),
('Rose Pinot', re.compile(r'\bRose\b.*\bPinot\b|\bPinot\b.*\bRose\b', re.IGNORECASE)),
('Pinot', re.compile(r'\bPinot\b|\bPN\b|\bP\s+Noir\b', re.IGNORECASE)),
('Merlot', re.compile(r'\bMerlot\b|\bME\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\s+Blanc\b|\bSB\b', re.IGNORECASE)),
('Sauv Blanc', re.compile(r'\bSauvignon\/Fume\s+Blanc\b', re.IGNORECASE)),
('Meritage', re.compile(r'\bMeritage\b', re.IGNORECASE)),
('Fume', re.compile(r'\bFume\b|\bFumé', re.IGNORECASE)),
('Champagne', re.compile(r'\bChampagne\b', re.IGNORECASE)),
('Chard', re.compile(r'\bChar+d|\bCH\b', re.IGNORECASE)),
('Shiraz', re.compile(r'\bShiraz\b', re.IGNORECASE)),
('Syrah', re.compile(r'\bSyrah\b|\bSY\b',re.IGNORECASE)),
('Zin', re.compile(r'\bZinfandel\b|\bZIN\b|\bZN\b', re.IGNORECASE)),
('Rose', re.compile(r'\bRose\b|\bRosé', re.IGNORECASE)),
    ('Sangiovese', re.compile(r'\bSangiovese\b', re.IGNORECASE)),
# ('Brandy', re.compile(r'\bBrandy\b', re.IGNORECASE)),
('Gewurzt', re.compile(r'\bGew.rztraminer\b|\bGewürzt', re.IGNORECASE)),
('Malbec', re.compile(r'\bMalbec\b', re.IGNORECASE)),
('Viognier', re.compile(r'\bViognier\b', re.IGNORECASE)),
('Roussanne', re.compile(r'\bRoussanne\b', re.IGNORECASE)),
('Charbono', re.compile(r'\bCharbono\b', re.IGNORECASE)),
('PSirah', re.compile(r'\bPetite Sirah\b', re.IGNORECASE)),
('Cuvee', re.compile(r'\bCuvee\b', re.IGNORECASE)),
('Red', re.compile(r'\bRed\b|\bBordeaux\s+Blend\b', re.IGNORECASE)),
('Syrah-Cab', re.compile(r'\bSyrcab\b|\bsyrah[-\s\/]+cab', re.IGNORECASE)),
('Grenache', re.compile(r'\bGrenache\b', re.IGNORECASE)),
('Tempranillo', re.compile(r'\bTempranillo\b', re.IGNORECASE)),
)
# wineries that we don't want to look up the grape on
ignoreGrapeLookup = {
'Cristal' : ['Rose', None],
'Domaine Carneros' : ['Brut', None],
'Dominus' : [None],
'Papillon' : None,
'Paraduxx' : None,
'Veuve' : None,
'zCointreau' : None,
'zGin Hendrick' : None,
'zGin Tanqueray' : ['Ten', None],
'zTripleSec Gr Marnier' : ['1880', '100th', 'Cent', 'Quin', None],
'zTripleSec Dekuyper' : None,
'zTripleSec Hiram' : None,
'zVodka Skyy' : ['Citrus', None],
'zVodka Tito' : None,
# 'Prisoner' : ['Cuttings', 'Red', 'Derange', 'Saldo', 'Blindfold', None],
}
# winery to wine lookup when no grape is found in the wine name
#
# extract the wine name for a winery - when a row has no grape match,
# the first of these names found in the wine field is used as the wine name
noGrapeLookup = {
    'Ehlers' : ['120-80'], # matches an abbreviation - and matches fldWineDescr
'Alban' : ['Pandora'],
'BV' : ['Tapestry', 'Latour'],
'Bennett Ln' : ['Maximus'],
'Bremer' : ['Austintatious'],
'Cain Five' : None,
'Colgin' : ['Cariad', 'IX'],
'Concha Don Melchor' : None,
'Continuum' : None,
'Darioush' : ['Duel', 'Darius'],
'Duckhorn' : ['Discussion'],
'Far Niente' : ['Dolce'],
'Flora' : ['Trilogy'],
'Franciscan' : ['Magnificat'],
'Grgich' : ['Violetta'],
'Gundlach' : ['Vintage Reserve'],
'Justin' : ['Isosceles'],
'Krug' : ['Generations'],
'Mondavi' : ['Maestro'],
'Newton' : ['Puzzle'],
'Opus One' : None,
'Phelps' : ['Insignia'],
'Prisoner' : ['Cuttings', 'Derange', 'Saldo', 'Blindfold'],
'Ridge' : ['Monte Bello'],
'Robert Foley' : ['Griffin'],
'Sullivan' : ['Coeur de Vigne'],
'Zaca' : ['ZThree', 'ZCuvee'],
'zCognac Courvoisier' : ['Napolean', 'VS', 'VSOP', 'XO'],
'zCognac Hennessy' : ['Paradis', 'Richard', 'VS', 'VSOP', 'XO', 'Master'],
'zCognac Remy' : ['1738', 'Louis XIII', 'VSOP', 'XO', 'VS'],
'zRum Ron Zacapa' : ['23', 'Negra', 'XO'],
'zRye Hayden' : ['Dark', 'Caribbean'],
'zScotch Hibiki Harmony' : None,
# 'zScotch Hibiki' : ['Toki', '12', '17', '21', '30'],
'zTeq Campo Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casamigos' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Casino Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Silver'],
'zTeq Clase Azul' : ['Ultra', 'Extra Anejo', 'Anejo', 'Blanco', 'Reposado', 'Mezcal', 'Plata', 'Platino'],
'zTeq Dos Artes' : ['Extra Anejo'],
'zTeq Gran Cava' : ['Extra Anejo'],
'zTeq Loma Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
# 'zTeq Padre Azul' : ['Extra Anejo', 'Anejo', 'Blanco', 'Reposado'],
'zTeq Partida' : ['Blanco', 'Elegante'],
'zVodka Absolut' : ['Citron', 'Mandarin', 'Mandrin', 'Mango', 'Ruby', 'Vanilia', 'Raspberri', 'Grapevine', None],
'zWhiskey J Walker' : ['Double Black', 'Black', 'Blue', 'Gold', 'Green', 'Platinum', 'Red','Swing', 'White', '18', '21'],
}
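# Illustrative sketch (not part of the original script): how the lookup tables above
# are intended to compose.  The matching rule (case-insensitive substring) is an
# assumption for illustration only.
def example_wine_name_for(winery, wine_name):
    '''Hypothetical: resolve a wine label via the fallback chain described above'''
    def first_candidate(candidates):
        for name in (candidates or []):
            if name and name.lower() in wine_name.lower():
                return name
        return None
    if winery in ignoreGrapeLookup:
        # grape extraction is deliberately skipped for these wineries
        return first_candidate(ignoreGrapeLookup[winery])
    for grape, pattern in grapeLookup:
        if pattern.search(wine_name):
            return grape
    # no grape found in the wine name - fall back to known wine names for the winery
    if winery in noGrapeLookup:
        return first_candidate(noGrapeLookup[winery])
    return None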
# regex to use to determine if this is a liquor, not a wine
#
# winery -> [ (liquor, regex), ... ]
# if there is no grape, and no noGrapeLookup match, but the winery has a liquorLookup,
# use the list of lookups to find the additional information to add to the winery
#
liquorLookup = {
'zRum Mt Gay' : [
('1703 Mst', re.compile(r'\b1703\b', re.IGNORECASE)),
('BB', re.compile(r'\bBlack Barrel\b', re.IGNORECASE)),
('Eclipse Silver', re.compile(r'\bEclipse\s+Silver\b', re.IGNORECASE)),
('Eclipse', re.compile(r'\bEclipse\b', re.IGNORECASE)),
('Old Peat', re.compile(r'\bOld Peat', re.IGNORECASE)),
('Old Pot', re.compile(r'\bPot\s+Still\b', re.IGNORECASE)),
('Old', re.compile(r'\bOld\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('XO Peat', re.compile(r'\bXO\b', re.IGNORECASE)),
],
'zScotch Glenmorangie' : [
('10', re.compile(r'\b10(YR)?\b', re.IGNORECASE)),
('14 Port', re.compile(r'14.+\bQuinta\b|14.+\bPort\b|\bQuinta\b.+14|\bPort\b.+14', re.IGNORECASE)),
('12 Bacalta', re.compile(r'\bBacalta\b', re.IGNORECASE)),
('12 Burgundy', re.compile(r'\bBurgundy\b', re.IGNORECASE)),
('12 Nectar', re.compile(r'\bNectar\b', re.IGNORECASE)),
('12 Port', re.compile(r'\bQuinta\b|\bPort\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\bLa\s?Santa\b|\bSherry\b', re.IGNORECASE)),
('12 Signet', re.compile(r'\bSignet\b', re.IGNORECASE)),
('15 Cadboll', re.compile(r'\bCadboll', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b|\b18YEAR\b', re.IGNORECASE)),
('25 Astar', re.compile(r'\bAstar\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b', re.IGNORECASE)),
('Companta', re.compile(r'\bCompanta\b', re.IGNORECASE)),
('Finealta', re.compile(r'\bFinealta\b', re.IGNORECASE)),
('Milsean', re.compile(r'\bMilsean\b', re.IGNORECASE)),
('Sonnalta', re.compile(r'\bSonnalta\b', re.IGNORECASE)),
],
'zScotch Macallan' : [
('10 Fine', re.compile(r'\bFine.*\b10\b|\b10.*Fine')),
('10', re.compile(r'\b10\b')),
('12 Double Gold', re.compile(r'\bDbl\b.*Gold|\bDouble\b.*Gold', re.IGNORECASE)),
('12 Double', re.compile(r'\bDouble\s.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12\s.*Double\b', re.IGNORECASE)),
('12 Double', re.compile(r'\bDbl\b|\bDouble\b', re.IGNORECASE)),
('12 Edition 1', re.compile(r'\bEdition\s.*1\b', re.IGNORECASE)),
('12 Edition 2', re.compile(r'\bEdition\s.*2\b', re.IGNORECASE)),
('12 Edition 3', re.compile(r'\bEdition\s.*3\b', re.IGNORECASE)),
('12 Edition 4', re.compile(r'\bEdition\s.*4\b', re.IGNORECASE)),
('12 Sherry', re.compile(r'\b12\s.*Sherry\b|\bSherry\b\s.*\b12', re.IGNORECASE)),
('12 Triple', re.compile(r'\b12(YR)?\s.*Triple\b', re.IGNORECASE)),
('12 Triple', re.compile(r'\bTriple\s.*12\b', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('15 Triple', re.compile(r'\b15(YR)?\s.*Triple\b|Triple.+\b15(YR)?\b', re.IGNORECASE)),
('15 Fine', re.compile(r'\b15(YR)?\b.*\bFine\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('17 Sherry', re.compile(r'\b17(YR)?\s.*Sherry\b', re.IGNORECASE)),
('17 Fine', re.compile(r'\b17(YR)?\b.*\bFine\b', re.IGNORECASE)),
('17', re.compile(r'\b17(YR)?\b', re.IGNORECASE)),
('18 Sherry', re.compile(r'\b18(YR)?\s.*Sherry\b|Sherry\b.*18', re.IGNORECASE)),
('18 Triple', re.compile(r'\b18(YR)?\s.*Triple\b|Triple.+\b18(YR)?\b', re.IGNORECASE)),
('18 Fine', re.compile(r'\b18(YR)?\b.*\bFine\b', re.IGNORECASE)),
('18 Gran', re.compile(r'Gran\b.*\b18', re.IGNORECASE)),
('18', re.compile(r'\b18(YR)?\b', re.IGNORECASE)),
('21 Fine', re.compile(r'\b21.*Fine\b', re.IGNORECASE)),
('21', re.compile(r'\b21(YR)?\b', re.IGNORECASE)),
('25 Sherry', re.compile(r'\b25\s.*Sherry\b', re.IGNORECASE)),
('25', re.compile(r'\b25(YR)?\b')),
('30 Sherry', re.compile(r'\b30\s.*Sherry', re.IGNORECASE)),
('30 Triple', re.compile(r'\b30(YR)?\s.*Triple\b|Triple.+\b30(YR)?\b', re.IGNORECASE)),
('30 Fine', re.compile(r'\b30(YR)?\b.*\bFine\b|Fine.*30', re.IGNORECASE)),
('30', re.compile(r'\b30(YR)?\b')),
('Rare', re.compile(r'\bRare\b', re.IGNORECASE)),
],
'zTeq Cuervo' : [
('Especial Gold', re.compile(r'\bEspecial\b.*Gold\b|Gold.*Especial', re.IGNORECASE)),
('Especial Blue', re.compile(r'\bEspecial\b.*Blue\b', re.IGNORECASE)),
('Especial', re.compile(r'\bEspecial\b', re.IGNORECASE)),
('Familia Platino', re.compile(r'\bPlatino\b', re.IGNORECASE)),
('Familia Anejo', re.compile(r'\bFamilia\b|\bReserva\b', re.IGNORECASE)),
('Gold', re.compile(r'\bGold\b', re.IGNORECASE)),
('Reposado Lagavulin', re.compile(r'\bReposado.*Lagavulin', re.IGNORECASE)),
('Tradicional Anejo', re.compile(r'Tradicional.*Anejo|Anejo.*Tradicional', re.IGNORECASE)),
('Tradicional Reposado', re.compile(r'Tradicional.*Reposado|Reposado.*Tradicional', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTradicional\b', re.IGNORECASE)),
('Tradicional Silver', re.compile(r'\bTraditional\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Don Julio' : [
('1942', re.compile(r'\b1942\b', re.IGNORECASE)),
('Real', re.compile(r'\bReal\b', re.IGNORECASE)),
('Anejo Claro 70th', re.compile(r'\b70th\b', re.IGNORECASE)),
('Anejo Claro', re.compile(r'\bAnejo\b\s*Claro\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
        ('Reposado Lagavulin', re.compile(r'\bRepo.+Lagavulin\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Double\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bReposado.+Dbl\b', re.IGNORECASE)),
('Reposado Dbl', re.compile(r'\bDouble.+Reposado\b', re.IGNORECASE)),
('Reposado Private', re.compile(r'\bReposado.+Private\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Herradura' : [
('Ultra', re.compile(r'\bUltra\b', re.IGNORECASE)),
('Suprema', re.compile(r'\bSuprema\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
('Reposado Gold', re.compile(r'\bReposado\s+Gold\b|\bGold\s+Reposado\b', re.IGNORECASE)),
('Reposado Scotch', re.compile(r'\bReposado.+Scotch\b|\bScotch.+Reposado\b', re.IGNORECASE)),
('Reposado Port', re.compile(r'\bPort.+Reposado\b|\bReposado.+Port\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
],
'zTeq Patron' : [
('Gran Piedra', re.compile(r'\bPiedra\b', re.IGNORECASE)),
('DELETE Roca DELETE', re.compile(r'\bRoca\b', re.IGNORECASE)),
('Anejo Extra Lalique', re.compile(r'\bLalique\b', re.IGNORECASE)),
('Anejo Extra 7yr', re.compile(r'\b7YR\b|\b7 anos\b|\b7 year\b', re.IGNORECASE)),
('Anejo Extra 5yr', re.compile(r'\b5YR\b|\b5 anos\b|\b5 year\b', re.IGNORECASE)),
('Anejo Extra 10yr', re.compile(r'\b10\b.+\bExtra\b|\bExtra\b.+10', re.IGNORECASE)),
('Anejo Extra', re.compile(r'\bExtra\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bGran\s+Anejo\b', re.IGNORECASE)),
('Gran Anejo', re.compile(r'\bBurdeos\b', re.IGNORECASE)),
('Gran Smoky', re.compile(r'\bGran\s+.*Smoky\b', re.IGNORECASE)),
('Anejo', re.compile(r'\bAnejo\b', re.IGNORECASE)),
('Gran Platinum', re.compile(r'\bPlatinum\b', re.IGNORECASE)),
('Reposado', re.compile(r'\bReposado\b', re.IGNORECASE)),
('Silver LTD', re.compile(r'\bSilver.*Limited\b|\bLimited.*Silver\b', re.IGNORECASE)),
('Silver Estate', re.compile(r'\bEstate.*Silver\b|\bSilver.*Estate\b', re.IGNORECASE)),
('Silver', re.compile(r'\bSilver\b', re.IGNORECASE)),
('Blanco', re.compile(r'\bBlanco\b', re.IGNORECASE)),
# ('', re.compile(r'\b\b', re.IGNORECASE)),
],
'zTeq Padre Azul' : [
('Blanco', re.compile(r'\bsilver\b', re.IGNORECASE)),
],
'zWhiskey Balvenie' : [
('12 Double', re.compile(r'\bDouble.*12(YR)?\b', re.IGNORECASE)),
('12 Double', re.compile(r'\b12(YR)?\s.*Double', re.IGNORECASE)),
('12 First', re.compile(r'\b12(YR)?\s.*First', re.IGNORECASE)),
('12 USA', re.compile(r'\b12.*American|American.*12', re.IGNORECASE)),
('12 Toast', re.compile(r'\b12(YR)?\s.*Toast', re.IGNORECASE)),
('12', re.compile(r'\b12(YR)?\b', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*Carib', re.IGNORECASE)),
('14 Carib', re.compile(r'\b14(YR)?\s.*CB\s+Cask', re.IGNORECASE)),
('14 Carib', re.compile(r'\bCarr?ib', re.IGNORECASE)),
('14 Peat', re.compile(r'\b14(YR)?\s.*Peat', re.IGNORECASE)),
('15 Sherry', re.compile(r'\b15(YR)?\s.*Sherry\b', re.IGNORECASE)),
('15 Sherry', re.compile(r'\bSherry\s+.*15(YR)?\b', re.IGNORECASE)),
('15', re.compile(r'\b15(YR)?\b', re.IGNORECASE)),
('16 Triple', re.compile(r'\b16(YR)?\s.*Triple\b', re.IGNORECASE)),
('17 Sherry Double', re.compile
blob_id: 920cd41b18f5cfb45f46c44ed707cebe682d4dd9 | language: Python
# Software License Agreement (BSD License)
#
# Copyright (c) 2009-2011, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Redistribution and use of this software in source and binary forms, with or
# without modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
#
# Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the
# following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Author: [email protected]
'''
@author: clarkmatthew
extension of the boto instance class, with added convenience methods + objects
Add common instance test routines to this class
Examples:
from eucaops import Eucaops
from nephoria.windows_instance import WinInstance
tester = Eucaops(credpath='eucarc-10.111.5.80-eucalyptus-sys_admin')
wins = WinInstance.make_euinstance_from_instance(tester.get_instances(idstring='i-89E13DA8')[0], tester=tester, keypair='test')
vol = tester.get_volume(status='available', zone=wins.placement)
wins.attach_volume(vol)
'''
import socket
import os
import re
import time
import copy
import types
import operator
from prettytable import PrettyTable, ALL
from boto.ec2.instance import Instance
from nephoria.aws.ec2.euvolume import EuVolume
from cloud_utils.log_utils import eulogger, get_line, markup
from nephoria.euca.taggedresource import TaggedResource
from boto.ec2.instance import InstanceState
from datetime import datetime
from cloud_utils.net_utils import winrm_connection
termline = get_line()
class WinInstanceDiskType():
gigabyte = 1073741824
megabyte = 1048576
def __init__(self, win_instance, wmic_dict):
self.check_dict_requires(wmic_dict)
self.__dict__ = self.convert_numbers_in_dict(copy.copy(wmic_dict))
self.win_instance = win_instance
self.size_in_gb = self.get_size_in_gb()
self.size_in_mb = self.get_size_in_mb()
self.size = long(self.size or 0)
self.last_updated = time.time()
self.setup()
def setup(self):
raise Exception('Not Implemented')
def check_dict_requires(self, wmic_dict):
raise Exception('Not Implemented')
def convert_numbers_in_dict(self, dict):
        # convert strings that represent whole numbers to longs
        for key in dict:
            value = str(dict[key])
            if (re.search(r"\S", value) and not re.search(r"\D", value)):
                dict[key] = long(dict[key])
return dict
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def get_size_in_gb(self):
'''
        Attempts to convert self.size from bytes to gigabytes, rounding up when the
        remainder exceeds .99 of a gigabyte, to account for differences in how the size is reported
'''
self.size = int(self.size or 0)
gigs = self.size / self.gigabyte
if (self.size % self.gigabyte) /float(self.gigabyte) >.99:
gigs += 1
return gigs
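        # Worked example (illustrative): a drive reported as 10736369664 bytes is
        # 9 whole GiB with a remainder of ~0.999 GiB (> .99), so it rounds up to 10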
def get_size_in_mb(self):
'''
        Attempts to convert self.size from bytes to megabytes, rounding up when the
        remainder exceeds .99 of a megabyte, to account for differences in how the size is reported
'''
self.size = int(self.size or 0)
mb = self.size / self.megabyte
if (self.size % self.megabyte) /float(self.megabyte) >.99:
mb += 1
return mb
def print_self(self):
self.get_summary(printmethod=self.win_instance.debug)
def get_summary(self, printheader=True, printmethod=None):
raise Exception('Method not implemented')
def print_self_full(self, printmethod=None):
'''
formats and prints self.dict
'''
self.win_instance.print_dict(dict=self.__dict__, printmethod=printmethod)
class WinInstanceDiskDrive(WinInstanceDiskType):
def setup(self):
if not hasattr(self,'serialnumber'):
self.serialnumber = ''
if not hasattr(self, 'caption'):
self.caption = ''
if hasattr(self,'model'):
self.caption = self.model
else:
self.model = self.caption
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.update_ebs_info()
self.disk_partitions = []
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
                ('caption' in wmic_dict or 'model' in wmic_dict) and
'index' in wmic_dict):
raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, caption, and index')
def get_partition_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.append(part.deviceid)
return retlist
def get_logicaldisk_ids(self):
retlist = []
for part in self.disk_partitions:
retlist.extend(part.get_logicaldisk_ids())
return retlist
def update_md5_info_from_ebs(self):
self.md5 = None
self.md5len = None
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
if not vol.md5:
vol.md5len = 1024
vol.md5 = self.win_instance.get_dev_md5(self.cygwin_scsi_drive, vol.md5len)
self.md5 = vol.md5
self.md5len = vol.md5len
break
def update_ebs_info_from_serial_number(self):
'''
Attempts to parse the serial number field from an EBS volume and find the correlating ebs volume
example format: vol-81C13EA4-dev-sdg
'''
if re.match("^vol-", self.serialnumber):
split = self.serialnumber.split('-')
self.ebs_volume = str(split[0]) + "-" + str(split[1])
self.ebs_cloud_dev = "/" + str(split[2]) + "/" + str(split[3])
else:
self.ebs_volume = ''
self.ebs_cloud_dev = ''
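        # e.g. (per the docstring above) serialnumber 'vol-81C13EA4-dev-sdg' yields
        # ebs_volume 'vol-81C13EA4' and ebs_cloud_dev '/dev/sdg'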
def update_ebs_info(self):
self.update_ebs_info_from_serial_number()
if not self.ebs_volume:
if self.index == 0 and self.win_instance.root_device_type == 'ebs':
bdm = self.win_instance.block_device_mapping[self.win_instance.root_device_name]
self.ebs_volume = bdm.volume_id
else:
for vol in self.win_instance.attached_vols:
if vol.guestdev == self.deviceid:
self.ebs_volume = vol.id
break
if not self.ebs_cloud_dev and self.ebs_volume:
volume = self.win_instance.tester.get_volume(volume_id=self.ebs_volume)
if hasattr(volume,'attach_data') and volume.attach_data:
self.ebs_cloud_dev = volume.attach_data.device
self.update_md5_info_from_ebs()
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 20
size = 16
sizegb = 7
ebsvol = 12
serialnumber = 24
caption = 36
part_count = 6
logical_ids = 8
cygdrive = 10
md5 = 32
header = "DISKDRIVE DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"EBS VOL".center(ebsvol) + "|" + \
"CAPTION".center(caption) + "|" + \
"PARTS".center(part_count) + "|" + \
"LOGICAL".center(logical_ids) + "|" + \
"CYGDRIVE".center(cygdrive) + "|" + \
"SERIAL NUMBER".center(serialnumber) + "|" + \
"MD5 CHECK SUM".center(md5) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.ebs_volume).center(ebsvol) + "|" + \
str(self.caption).center(caption) + "|" + \
str(self.partitions).center(part_count) + "|" + \
str(",".join(str(x) for x in self.get_logicaldisk_ids())).center(logical_ids) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|" + \
str(self.serialnumber).center(serialnumber) + "|" + \
str(self.md5).center(md5) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceDiskPartition(WinInstanceDiskType):
def setup(self):
#self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(drive_id=self.deviceid)
self.logicaldisks = []
#Set values in case 'brief' was used when fetching partitions
if not hasattr(self,'deviceid'):
self.deviceid = self.name
if not hasattr(self,'bootable'):
self.bootable = self.bootpartition
if not hasattr(self,'diskindex'):
self.diskindex = self.get_disk_index_from_name()
def check_dict_requires(self, wmic_dict):
if not ('name' in wmic_dict and
'size' in wmic_dict and
'bootpartition' in wmic_dict and
'index' in wmic_dict):
            raise Exception('wmic_dict passed does not contain needed attributes; name, size, bootpartition, and index')
def get_disk_index_from_name(self):
diskindex = None
diskindexstring = self.name.split(',')[0]
if re.search('disk', diskindexstring, re.IGNORECASE):
diskindex = int(diskindexstring.split('#')[1])
return diskindex
def get_logicaldisk_ids(self):
retlist = []
for disk in self.logicaldisks:
retlist.append(disk.deviceid)
return retlist
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
sizegb = 12
sizemb = 12
bootable = 10
header = "PARTITION DEV ID".center(deviceid) + "|" + \
"SIZE B".center(size) + "|" + \
"SIZE GB".center(sizegb) + "|" + \
"SIZE MB".center(sizemb) + "|" + \
"BOOTABLE".center(bootable) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.size_in_gb).center(sizegb) + "|" + \
str(self.size_in_mb).center(sizemb) + "|" + \
str(self.bootable).center(bootable) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstanceLogicalDisk(WinInstanceDiskType):
def setup(self):
self.cygwin_scsi_drive = self.win_instance.get_cygwin_scsi_dev_for_windows_drive(windisk=self)
self.partition = None
def check_dict_requires(self, wmic_dict):
if not ('deviceid' in wmic_dict and
'size' in wmic_dict and
'description' in wmic_dict and
'freespace' in wmic_dict and
'filesystem' in wmic_dict):
            raise Exception('wmic_dict passed does not contain needed attributes; deviceid, size, description, freespace, and filesystem')
def get_summary(self, printheader=True, printmethod=None):
buf = ""
deviceid = 24
size = 16
freespace = 16
filesystem = 24
description = 30
cygdrive = 10
header = "LOGICAL DEV ID".center(deviceid) + "|" + \
"SIZE".center(size) + "|" + \
"FREE SPACE".center(freespace) + "|" + \
"FILE SYSTEM".center(filesystem) + "|" + \
"DESCRIPTION".center(description) + "|" + \
"CYGDRIVE".center(cygdrive) + "|"
summary = str(self.deviceid).center(deviceid) + "|" + \
str(self.size).center(size) + "|" + \
str(self.freespace).center(freespace) + "|" + \
str(self.filesystem).center(filesystem) + "|" + \
str(self.description).center(description) + "|" + \
str(self.cygwin_scsi_drive).center(cygdrive) + "|"
length = len(header)
if len(summary) > length:
length = len(summary)
line = get_line(length)
if printheader:
buf += line + header + line
buf += summary + line
if printmethod:
printmethod(buf)
return buf
class WinInstance(Instance, TaggedResource):
gigabyte = 1073741824
megabyte = 1048576
@classmethod
def make_euinstance_from_instance(cls,
instance,
tester,
debugmethod = None,
keypair=None,
keypath=None,
password=None,
username="Administrator",
auto_connect = True,
verbose=True,
timeout=120,
private_addressing = False,
reservation = None,
cmdstart=None,
try_non_root_exec=True,
winrm_port='5985',
winrm_protocol='http',
rdp_port='3389',
rootfs_device = "sda",
block_device_prefix = "sd",
bdm_root_vol = None,
virtio_blk = True,
cygwin_path = None,
disk_update_interval=10,
retry=2,
brief=False
):
'''
Primary constructor for this class. Note: to avoid an ssh session within this method, provide keys, username/pass later.
Arguments:
instance - mandatory- a Boto instance object used to build this euinstance object
keypair - optional- a boto keypair object used for creating ssh connection to the instance
username - optional- string used to create ssh connection as an alternative to keypair
password - optional- string used to create ssh connection to this instance as an alternative to keypair
exec_password -optional -string used for su or sudo where prompted for password, will default to 'password'
auto_connect -optional -boolean, if True will attempt to automatically create an ssh session for this instance
try_non_root_exec -optional -boolean, if True will attempt to use sudo if available else su -c to execute privileged commands
timeout - optional- integer used for ssh connection timeout
debugmethod - optional - method, used for debug output
verbose - optional - boolean to determine if debug is to be printed using debug()
retry - optional - integer, ssh connection attempts for non-authentication failures
'''
newins = WinInstance(instance.connection)
newins.__dict__ = instance.__dict__
newins.tester = tester
newins.winrm_port = winrm_port
newins.rdp_port = rdp_port
newins.bdm_root_vol = None
newins.winrm_protocol = winrm_protocol
newins.debugmethod = debugmethod
if newins.debugmethod is None:
newins.log = eulogger.Eulogger(identifier= str(instance.id))
newins.debugmethod= newins.log.debug
if (keypair is not None):
if isinstance(keypair,types.StringTypes):
keyname = keypair
keypair = tester.get_keypair(keyname)
else:
keyname = keypair.name
newins.keypath = keypath or os.getcwd() + "/" + keyname + ".pem"
newins.keypair = keypair
newins.password = password
newins.username = username
newins.verbose = verbose
newins.attached_vols=[]
newins.timeout = timeout
newins.virtio_blk = virtio_blk
newins.disk_update_interval = disk_update_interval
newins.retry = retry
newins.brief = brief
newins.rootfs_device = rootfs_device
newins.block_device_prefix = block_device_prefix
newins.private_addressing = private_addressing
newins.reservation = reservation or newins.get_reservation()
if newins.reservation:
newins.security_groups = newins.tester.get_instance_security_groups(newins)
else:
newins.security_groups = None
newins.laststate = newins.state
newins.cmdstart = cmdstart
newins.auto_connect = auto_connect
newins.set_last_status()
newins.update_vm_type_info()
newins.cygwin_path = cygwin_path
newins.system_info = None
newins.diskdrives = []
newins.disk_partitions = []
newins.logicaldisks = []
newins.cygwin_dev_map = {}
#newins.set_block_device_prefix()
if newins.root_device_type == 'ebs':
try:
volume = newins.tester.get_volume(volume_id = newins.block_device_mapping.get(newins.root_device_name).volume_id)
newins.bdm_root_vol = EuVolume.make_euvol_from_vol(volume, tester=newins.tester,cmdstart=newins.cmdstart)
except:pass
newins.winrm = None
if newins.auto_connect and newins.state == 'running':
newins.connect_to_instance(timeout=timeout)
return newins
@property
def age(self):
launchtime = self.tester.get_datetime_from_resource_string(self.launch_time)
# return the elapsed time in seconds
return (time.mktime(datetime.utcnow().utctimetuple()) -
time.mktime(launchtime.utctimetuple()))
def update(self, validate=False, dry_run=False,
err_state='terminated', err_code=-1):
ret = None
tb = ""
retries = 2
for x in xrange(0, retries):
try:
#send with validation True, fail later...
ret = super(WinInstance, self).update(validate=True,
dry_run=dry_run)
break
except ValueError:
if validate:
raise
tb = self.tester.get_traceback()
self.debug('Failed to update instance. Attempt:{0}/{1}'
.format(x, retries))
if not ret:
            failmsg = 'Failed to update instance. Instance may no longer ' \
                      'be present on system "{0}"'.format(self.id)
self.debug('{0}\n{1}'.format(tb, failmsg))
self.debug('{0} setting fake state to:"{1}"'.format(self.id,
err_state))
state = InstanceState(name=err_state, code=err_code)
self._state = state
ret = self.state
self.set_last_status()
return ret
def update_vm_type_info(self):
self.vmtype_info = self.tester.get_vm_type_from_zone(self.placement,self.instance_type)
return self.vmtype_info
def set_last_status(self,status=None):
self.laststate = self.state
self.laststatetime = time.time()
self.age_at_state = self.tester.get_instance_time_launched(self)
#Also record age from user's perspective, ie when they issued the run instance request (if this is available)
if self.cmdstart:
self.age_from_run_cmd = "{0:.2f}".format(time.time() - self.cmdstart)
else:
self.age_from_run_cmd = None
def print_dict(self, dict=None, printmethod=None):
'''
formats and prints
'''
printmethod = printmethod or self.debug
buf = "\n"
dict = dict or self.__dict__
longest_key = 0
for key in dict:
if len(key) > longest_key:
longest_key = len(key)
for key in dict:
buf += str(key).ljust(longest_key) + " -----> :" + str(dict[key]) + "\n"
printmethod(buf)
def printself(self, title=True, footer=True, printmethod=None, printme=True):
def state_markup(state):
# Markup instance state...
if state == 'running':
return markup(state, markups=[1, 92])
if state == 'terminated':
return markup(state, markups=[1, 97])
if state =='shutting-down':
return markup(state, markups=[1, 95])
if state == 'pending':
return markup(state, markups=[1, 93])
if state =='stopped':
return markup(state, markups=[1, 91])
else:
return markup(state, markups=[1, 91])
def multi_line(lines):
# Utility method for creating multi line table entries...
buf = ""
maxlen = 0
for line in lines:
if len(line) + 2 > maxlen:
maxlen = len(line) + 2
for line in lines:
buf += str(line).ljust(maxlen) + "\n"
buf = buf.rstrip()
return (buf, maxlen)
bdmvol = self.root_device_type
if self.bdm_root_vol:
bdmvol += ":" + self.bdm_root_vol.id
reservation_id = None
if self.reservation:
reservation_id = self.reservation.id
owner_id = self.reservation.owner_id
else:
owner_id = "???"
# Create a multi line field for instance's run info
idlist = [markup("{0} {1}".format('ID:', self.id), markups=[1, 4, 94]),
"{0} {1}".format(markup('TYPE:'), self.instance_type),
"{0} {1}".format(markup('RES:'), reservation_id),
"{0}".format(markup("ACCOUNT ID:")), owner_id]
id_string, idlen = multi_line(idlist)
try:
emi = self.tester.get_emi(self.image_id)
emi_name = str(emi.name[0:18]) + ".."
except:
emi_name = ""
# Create a multi line field for the instance's image info
virt_type = 'PV'
if self.virtualization_type == 'hvm':
virt_type = 'HVM'
emi_string, emilen = multi_line(
[markup("{0} {1}".format('EMI:', self.image_id)),
"{0} {1}".format(markup('OS:'), self.platform or 'linux'),
"{0} {1}".format(markup('VIRT:'), virt_type),
"{0}".format(markup('IMAGE NAME:')),
emi_name])
# Create a multi line field for the instance's state info
        age = int(self.age or 0)
state_string, state_len = multi_line(["STATE: " + state_markup(self.laststate),
"{0} {1}".format(markup('AGE:'), age),
"{0} {1}".format(markup("ZONE:"), self.placement),
markup('ROOTDEV:'), bdmvol])
# Create the primary table called pt...
netinfo = 'INSTANCE NETWORK INFO:'
idheader = 'INSTANCE ID'
imageheader = 'INSTANCE IMAGE'
stateheader = 'INSTANCE STATE'
pt = PrettyTable([idheader, imageheader, stateheader, netinfo])
pt.align[netinfo] = 'l'
pt.valign[netinfo] ='m'
pt.align[idheader] = 'l'
pt.align[imageheader] = 'l'
pt.align[stateheader] = 'l'
pt.max_width[idheader] = idlen
pt.max_width[imageheader] = emilen
pt.max_width[stateheader] = state_len
pt.padding_width = 0
pt.hrules = ALL
        # PrettyTable headers do not work with ascii markups, so make a pseudo header
new_header = []
for field in pt._field_names:
new_header.append(markup(field, markups=[1, 4]))
pt.add_row(new_header)
pt.header = False
# Create a subtable 'netpt' to summarize and format the networking portion...
# Set the maxwidth of each column so the tables line up when showing multiple instances
vpc_col = ('VPC', 4)
subnet_col = ('SUBNET', 6)
if self.vpc_id:
vpc_col = ('VPC', 12)
subnet_col = ('SUBNET', 15)
secgrp_col = ('SEC GRPS', 11)
privaddr_col = ('P', 1)
privip_col = ('PRIV IP', 15)
pubip_col = ('PUB IP', 15)
net_cols = [vpc_col, subnet_col, secgrp_col, privaddr_col, privip_col, pubip_col]
# Get the Max width of the main tables network summary column...
# Start with 2 to account for beginning and end column borders
netinfo_width = 2
netinfo_header = []
for col in net_cols:
netinfo_width += col[1] + 1
netinfo_header.append(col[0])
pt.max_width[netinfo] = netinfo_width
netpt = PrettyTable([vpc_col[0], subnet_col[0], secgrp_col[0], privaddr_col[0],
privip_col[0], pubip_col[0]])
netpt.padding_width = 0
netpt.vrules = ALL
for col in net_cols:
netpt.max_width[col[0]] = col[1]
sec_grps = []
for grp in self.groups:
sec_grps.append(str(grp.id))
sec_grps = ",".join(sec_grps)
private_addressing = "N"
if self.private_addressing:
private_addressing = "Y"
netpt.add_row([str(self.vpc_id).center(vpc_col[1]),
str(self.subnet_id).center(subnet_col[1]),
str(sec_grps).center(secgrp_col[1]),
str(private_addressing).center(privaddr_col[1]),
str(self.private_ip_address).center(privip_col[1]),
str(self.ip_address).center(pubip_col[1])])
# To squeeze a potentially long keyname under the network summary table, get the length
# and format this column to allow for wrapping a keyname under the table...
# netbuf = netpt.get_string()
netbuf = "{0}:{1} {2}:{3}\n".format(markup("NODE"),
self.tags.get('euca:node', "???").ljust(16),
markup("KEYPAIR"), self.key_name)
netbuf += "\n".join(netpt.get_string().splitlines()[0:-1])
# Create the row in the main table...
pt.add_row([id_string, emi_string, state_string, netbuf])
if printme:
printmethod = printmethod or self.log.debug
printmethod("\n" + str(pt) + "\n")
return pt
def get_password(self,
private_key_path=None,
key=None,
dir=None,
exten=".pem",
encoded=True,
force_update=False):
'''
:param private_key_path: private key file used to decrypt password
:param key: name of private key
:param dir: Path to private key
:param exten: extension of private key
:param encoded: boolean of whether string returned from server is
Base64 encoded
:return: decrypted password
'''
if self.password is None or force_update:
self.password = self.tester.get_windows_instance_password(
self,
private_key_path=private_key_path,
key=key,
dir=dir,
exten=exten,
encoded=encoded)
return self.password
def reset_ssh_connection(self, timeout=None):
# todo: Remove ssh reference from this method, use something like
# reset_instance_connection, etc..
self.debug('Note ssh not implemented at this time, using winrm for '
'shell access instead...')
return self.reset_winrm_connection(timeout=timeout)
def reset_winrm_connection(self, timeout=None, force=False):
# todo:
timeout = timeout or self.timeout
self.debug('reset_winrm_connection for:'+str(self.id))
self.get_password(force_update=True)
if self.username is None or self.password is None:
#Allow but warn here as this may be a valid negative test
self.debug('Warning username and/or password were None in '
                       'winrm connection?')
# Create a new winrm interface if this is a new instance or
# an attribute has changed...
try:
#Check the port in order to provide debug if the connection fails
self.test_port_status(port=self.winrm_port, ip=self.ip_address)
except:pass
if force or not (self.winrm and \
self.winrm.hostname == self.ip_address and \
self.winrm.username == self.username and \
self.winrm.password == self.password):
if self.winrm:
self.winrm.close_shell()
self.winrm = winrm_connection.Winrm_Connection(
hostname = self.ip_address,
username = self.username,
password = self.password,
port = self.winrm_port,
protocol = self.winrm_protocol,
debug_method = self.debug,
verbose=True
)
def get_reservation(self):
res = None
try:
res = self.tester.get_reservation_for_instance(self)
except Exception, e:
self.update()
self.debug('Could not get reservation for instance in state:' +
str(self.state) + ", err:" + str(e))
return res
def connect_to_instance(self, wait_for_boot=180, timeout=120):
'''
Attempts to connect to an instance via ssh.
:params wait_for_boot: time to wait, allowing guest to boot before
attempting to poll for ports active status
:params timeout: -optional - time in seconds to wait when polling
port(s) status(s) before failure
'''
self.debug("{0}connect_to_instance starting.\nwait_for_boot:{1} "
"seconds\ntimeout from boot:{2}{3}"
.format(termline, wait_for_boot, timeout, termline))
try:
self.poll_for_port_status_with_boot_delay(waitforboot=wait_for_boot,
timeout=timeout)
except Exception, e:
self.debug('Warning failed to poll port status:' + str(e))
self.debug("Attempting to create connection to instance:" + self.id)
attempts = 0
start = time.time()
elapsed = 0
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
while (elapsed < timeout):
attempts += 1
try:
self.update()
self.reset_winrm_connection()
self.debug('Try some sys...')
self.sys("whoami")
except Exception, se:
tb = self.tester.get_traceback()
self.debug('Caught exception attempting to connect '
'winrm shell:\n'+ str(tb) + str(se))
elapsed = int(time.time()-start)
self.debug('connect_to_instance: Attempts:' + str(attempts) +
', elapsed:'+str(elapsed)+'/'+str(timeout))
if self.winrm is not None:
self.winrm.close_shell()
self.winrm = None
time.sleep(5)
pass
else:
break
elapsed = int(time.time()-start)
if self.winrm is None:
self.get_connection_debug()
raise RuntimeError(str(self.id) +
":Failed establishing management connection to "
"instance, elapsed:" + str(elapsed) +
"/" + str(timeout))
self.debug('Connect_to_instance updating attached volumes/disk '
'info for vols:'+ str(self.attached_vols))
if self.brief:
self.update_system_info()
else:
self.update_system_and_disk_info()
self.init_attached_volumes()
self.debug("{0}connect_to_instance completed{1}"
.format(termline, termline))
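    # Illustrative usage (assumes a configured tester and a running instance,
    # mirroring the module docstring example at the top of this file):
    #   win = WinInstance.make_euinstance_from_instance(ins, tester=tester, keypair='test')
    #   win.connect_to_instance(wait_for_boot=180, timeout=120)
    #   win.sys('ipconfig /all')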
def get_connection_debug(self):
# Add network debug/diag info here...
# First show arp cache from local machine
# todo Consider getting info from relevant euca components:
# - iptables info
# - route info
# - instance xml
try:
# Show local ARP info...
arp_out = "\nLocal ARP cache for instance ip: " \
+ str(self.ip_address) + "\n"
            arp_fd = os.popen('arp ' + str(self.ip_address))
for line in arp_fd:
arp_out += line
self.debug(arp_out)
except Exception as AE:
self.log.debug('Failed to get arp info:' + str(AE))
try:
self.tester.get_console_output(self)
except Exception as CE:
self.log.debug('Failed to get console output:' + str(CE))
def update_root_device_diskdrive(self):
if not self.root_device_type == 'ebs':
return
for disk in self.diskdrives:
if disk.index == 0:
if disk.ebs_volume:
for vol in self.attached_vols:
if vol.id == disk.ebs_volume:
if not disk.md5:
disk.update_md5_info_from_ebs()
return
volume = self.tester.get_volume(volume_id=disk.ebs_volume)
if not isinstance(volume, EuVolume):
volume = EuVolume.make_euvol_from_vol(volume, self.tester)
volume.guestdev = disk.deviceid
volume.md5len = 1024
volume.md5 = self.get_dev_md5(disk.cygwin_scsi_drive, volume.md5len)
if not self.get_volume_from_attached_list_by_id(volume.id):
self.debug("{0} updating with root vol:{1}{2}"
.format(termline,
volume.id,
termline))
self.attached_vols.append(volume)
disk.update_md5_info_from_ebs()
return
def get_volume_from_attached_list_by_id(self, volume_id):
for vol in self.attached_vols:
if vol.id == volume_id:
return vol
def update_system_and_disk_info(self):
try:
self.update_system_info()
except Exception, sie:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating system info:" + str(sie))
try:
self.update_disk_info()
self.update_root_device_diskdrive()
self.print_partition_summary()
self.print_logicaldisk_summary()
self.print_diskdrive_summary()
except Exception, ude:
tb = self.tester.get_traceback()
self.debug(str(tb) + "\nError updating disk info:" + str(ude))
def has_sudo(self):
return False
def debug(self,msg,traceback=1,method=None,frame=False):
'''
        Used to print debug output; defaults to print() but is overridden by self.debugmethod if not None
msg - mandatory -string, message to be printed
'''
if ( self.verbose is True ):
self.debugmethod(msg)
def sys(self, cmd, verbose=True, code=None, include_stderr=False, enable_debug=False, timeout=None):
'''
Issues a command against the ssh connection to this instance
Returns a list of the lines from stdout+stderr as a result of the command
cmd - mandatory - string, the command to be executed
verbose - optional - boolean flag to enable debug
timeout - optional - command timeout in seconds
'''
if (self.winrm is None):
raise Exception("WinInstance winrm connection is None")
return self.winrm.sys(command=cmd, include_stderr=include_stderr, timeout=timeout, verbose=verbose, code=code)
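    # Hypothetical usage sketch (not from the original source): assumes an
    # established winrm connection and that the guest understands the command.
    #   lines = instance.sys('ipconfig /all', verbose=True, code=0, timeout=120)
    #   for line in lines:
    #       instance.debug(line)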
def test_rdp_port_status(self, ip=None, port=3389, timeout=10):
'''
Description: Attempts to test that the host is accepting tcp connections to the RDP port
'''
ip = ip or self.ip_address
return self.test_port_status(ip=ip, port=port, timeout=timeout)
def test_port_status(self, port, ip=None, timeout=5, tcp=True, verbose=True):
ip = ip or self.ip_address
return self.tester.test_port_status(ip, int(port), timeout=timeout, tcp=tcp, verbose=verbose)
def poll_for_port_status_with_boot_delay(self, interval=15, ports=[], socktimeout=5, timeout=180, waitforboot=300):
    '''
    Make sure enough time has passed since launch before testing ports on the guest side...
    '''
launch_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if launch_seconds > waitforboot else (waitforboot - launch_seconds)
self.debug("Instance was launched "+str(launch_seconds)+" seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
time.sleep(sleeptime)
return self.poll_for_ports_status(ports,
ip=self.ip_address,
interval=interval,
socktimeout=socktimeout,
timeout=timeout)
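    # Hypothetical usage sketch (port numbers and timeouts are illustrative,
    # not taken from this class's defaults):
    #   instance.poll_for_port_status_with_boot_delay(ports=[3389, 5985],
    #                                                 interval=15, timeout=300)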
def wait_for_time_since_launch(self,waitforboot=420):
'''
When using larger instance store images, this can allow for the delays caused by image size/transfer.
'''
boot_seconds = self.tester.get_instance_time_launched(self)
sleeptime = 0 if boot_seconds > waitforboot else (waitforboot - boot_seconds)
self.debug("Instance was launched "+str(boot_seconds)+"/"+str(waitforboot) + " seconds ago, waiting:"+str(sleeptime)+" for instance to boot")
start = time.time()
elapsed = 0
print "Waiting for Windows to fully boot:",
while elapsed < sleeptime:
print "Waiting for Windows to fully boot:"+str(sleeptime-elapsed),
time.sleep(5)
elapsed=int(time.time()-start)
self.debug("test_wait_for_instance_boot: done waiting, instance up for "+str(waitforboot)+" seconds")
def poll_for_ports_status(self, ports=[], ip=None, interval=10, socktimeout=5, timeout=180):
ip = ip or self.ip_address
ports = ports or [self.rdp_port, self.winrm_port]
start = time.time()
elapsed = 0
attempt = 0
while elapsed < timeout:
attempt +=1
self.debug('test_poll_for_ports_status, ports:'+ ",".join(str(x) for x in ports) + ", attempt:" + str(attempt))
for port in ports:
if elapsed < timeout:
try:
self.debug('Trying ip:port:' + str(self.ip_address) + ':' + str(port) + ", elapsed:" + str(elapsed))
self.test_port_status(ip=ip, port=int(port), timeout=5)
return
except socket.error, se:
self.debug('test_ports_status failed socket error:'+str(se[0]))
#handle specific errors here, for now just for debug...
ecode=se[0]
if ecode == socket.errno.ETIMEDOUT or ecode == "timed out":
self.debug("test_poll_for_ports_status: Connect "+str(ip)+":" +str(port)+ " timed out retrying. Time remaining("+str(timeout-elapsed)+")")
except Exception, e:
tb = self.tester.get_traceback()
self.debug(tb)
self.debug('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+', err:'+str(e) )
elapsed = int(time.time() -start)
if elapsed < timeout:
time.sleep(interval)
raise Exception('test_poll_for_ports_status:'+str(ip)+':'+str(port)+' FAILED after attempts:'+str(attempt)+', elapsed:'+str(elapsed)+' seconds')
def init_attached_volumes(self):
self.debug('init_attached_volumes... attached_vols:' + str(self.attached_vols))
syncdict = self.sync_attached_volumes_with_clouds_view()
if syncdict['errors']:
errmsg = 'Errors syncing guest volumes with cloud at init:' + ",".join(str(e) for e in syncdict['errors'])
errmsg += '\nFailed to sync guest volumes with cloud at init:' + ",".join(str(x) for x in syncdict['badvols'])
self.debug(errmsg)
time.sleep(60)
raise Exception(errmsg)
def sync_attached_volumes_with_clouds_view(self):
self.debug(termline +
"Starting sync_attached_volumes_with_clouds_view"
+ termline )
badvols = []
errors = []
ret = {'errors':errors, 'badvols':badvols}
#Get a list of volumes that the cloud believes are currently attached
cloud_volumes = self.tester.get_volumes(attached_instance=self.id)
#Make a copy of the list of volumes this instance thinks are currently attached
locallist = copy.copy(self.attached_vols)
self.debug('Cloud list:' + str(cloud_volumes))
self.debug('Local list:' + str(locallist))
for vol in cloud_volumes:
for local_vol in locallist:
if local_vol.id == vol.id:
locallist.remove(local_vol)
if not isinstance(vol, EuVolume):
vol = EuVolume.make_euvol_from_vol(vol, self.tester)
try:
self.update_volume_guest_info(volume=vol)
except Exception, e:
badvols.append(vol)
errors.append(vol.id + ' Error syncing with cloud: ' + str(e) + '. \n')
for local_vol in locallist:
badvols.append(local_vol)
errors.append(local_vol.id + ' Error: unattached volume found in guest attach list. \n')
self.debug(termline +
"Finishing sync_attached_volumes_with_clouds_view"
+ termline )
return ret
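    # Minimal sketch of the reconciliation idea above (illustrative only):
    # every volume the cloud reports attached is verified locally, and any
    # volume left over in the local copy afterwards is flagged as out of sync.
    #   cloud_ids = set(v.id for v in cloud_volumes)
    #   stale = [v for v in self.attached_vols if v.id not in cloud_ids]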
def update_system_info(self):
'''
Gather basic system info for this windows instance object and store in self.system_info
Example:
# print wins.system_info.OS_NAME
'Microsoft Windows 7 Professional'
'''
currentkey = None
swap = re.compile('([!@#$%^&*. ])')
info = self.sys('systeminfo')
if self.system_info:
system_info = self.system_info
else:
system_info = type('obj', (object,),{})
if info:
for line in info:
if re.match("^\w.+:", line):
linevals = line.split(':')
currentkey = linevals.pop(0)
#clean up the key string...
currentkey = re.sub('[()]', '', currentkey)
currentkey = re.sub(swap, '_', currentkey)
currentkey = currentkey.lower()
value = ":".join(str(x) for x in linevals) or ""
setattr(system_info, currentkey, str(value).strip())
elif currentkey:
    #this is an additional value for our previous key
    prev_value = getattr(system_info, currentkey)
    if not isinstance(prev_value, types.ListType):
        updated_value = [prev_value]
    else:
        updated_value = prev_value
    updated_value.append(str(line).strip())
    setattr(system_info, currentkey, updated_value)
self.system_info = system_info
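    # Hypothetical usage sketch, following the docstring example above.
    # Attribute names are the 'systeminfo' keys lowercased with punctuation
    # and spaces swapped for underscores, so exact names depend on the guest:
    #   instance.update_system_info()
    #   print instance.system_info.os_name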
def get_cygwin_path(self, prefix="c:\\"):
if self.cygwin_path:
return self.cygwin_path
path = None
self.debug('Trying to find cygwin path...')
out = self.sys('dir'+
|
33867677611ceb757f6973eb70368c9f75f3ce92
|
Python
|
<|begin_of_text|># system
import os
import numpy as np
import random
import copy
import time
# ROS
import rospy
import std_msgs.msg
import sensor_msgs.msg
import geometry_msgs.msg
import visualization_msgs.msg
import tf2_ros
import rosbag
import actionlib
from actionlib_msgs.msg import GoalStatus
import ros_numpy
# spartan ROS
import spartan_grasp_msgs.msg
import spartan_grasp_msgs.srv
import pdc_ros_msgs.msg
import fusion_server.msg
import fusion_server.srv
# spartan
import spartan.utils.utils as spartanUtils
import spartan.utils.ros_utils as rosUtils
import spartan.utils.director_utils as director_utils
import spartan.utils.control_utils as control_utils
from spartan.manipulation.schunk_driver import SchunkDriver
import fusion_server
from fusion_server.srv import *
import spartan.manipulation.gripper
from spartan.poser.poser_visualizer import PoserVisualizer
from spartan.manipulation.grasp_data import GraspData
from spartan.manipulation.object_manipulation import ObjectManipulation
from spartan.manipulation.category_manipulation_type import CategoryManipulationType
from spartan.utils.director_ros_visualizer import DirectorROSVisualizer
# director
from director import transformUtils
from director import visualization as vis
import director.objectmodel as om
import director.vtkNumpy as vnp
from director.debugVis import DebugData
import director.vtkAll as vtk
import director.segmentation as segmentation
import director.filterUtils as filterUtils
USING_DIRECTOR = True
if USING_DIRECTOR:
from spartan.utils.taskrunner import TaskRunner
MUG_RACK_CONFIG_FILE = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/manipulation/mug_rack.yaml")
# If USE_DEBUG_SPEED is True, joint speed is limited to this value
DEBUG_SPEED = 20 # degrees per second
USE_DEBUG_SPEED = False
MANIP_TYPE = CategoryManipulationType.SHOE_ON_RACK
# MANIP_TYPE = CategoryManipulationType.MUG_ON_SHELF_3D
EXPERIMENT_MODE = True
class GraspSupervisorState(object):
STATUS_LIST = ["ABOVE_TABLE", "PRE_GRASP", "GRASP", "IK_FAILED", "NO_GRASP_FOUND", "GRASP_FOUND", "OBJECT_IN_GRIPPER", "GRASP_FAILED", "SAFETY_CHECK_FAILED", "PLANNING_FAILED", "FAILED"]
def __init__(self):
self.setPickFront()
self.clear()
def setPickFront(self):
self.graspingLocation = "front"
self.stowLocation = "left"
def setPickLeft(self):
self.graspingLocation = "left"
self.stowLocation = "front"
@property
def grasp_data(self):
return self._grasp_data
@grasp_data.setter
def grasp_data(self, value):
"""
:param value: GraspData
:return:
"""
self._grasp_data = value
@property
def cache(self):
return self._cache
def clear(self):
"""
Clear any stateful elements of the state
:return:
"""
self._grasp_data = None
self._status = None
self._cache = dict()
self._trajectory_result = None
def clear_cache(self):
"""
Clears only the cache
:return:
"""
self._cache = dict()
def set_status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
@property
def status(self):
return self._status
@status.setter
def status(self, status):
assert status in GraspSupervisorState.STATUS_LIST
self._status = status
def set_status_ik_failed(self):
self.status = "IK_FAILED"
def print_status(self):
"""
Prints the status
:return:
"""
if self._status is None:
print "Current Status: None"
else:
print "Current Status: " + self._status
class GraspSupervisor(object):
def __init__(self, graspingParamsFile=None, cameraSerialNumber="carmine_1", tfBuffer=None):
self.graspingParamsFile = graspingParamsFile
self.reloadParams()
self.cameraSerialNumber = cameraSerialNumber
self.cameraName = 'camera_' + str(cameraSerialNumber)
self.pointCloudTopic = '/' + str(self.cameraName) + '/depth/points'
self.rgbImageTopic = '/' + str(self.cameraName) + '/rgb/image_rect_color'
self.depthImageTopic = '/' + str(self.cameraName) + '/depth_registered/sw_registered/image_rect'
self.camera_info_topic = '/' + str(self.cameraName) + '/rgb/camera_info'
self.graspFrameName = 'base'
self.ggcnn_grasp_frame_camera_axes_id = "ggcnn_grasp"
self.depthOpticalFrameName = self.cameraName + "_depth_optical_frame"
self.rgbOpticalFrameName = self.cameraName + "_rgb_optical_frame"
self.state = GraspSupervisorState()
self.robotService = rosUtils.RobotService.makeKukaRobotService()
self.robotService._use_debug_speed = USE_DEBUG_SPEED
self.robotService._debug_speed = DEBUG_SPEED
self.usingDirector = True
self.tfBuffer = tfBuffer # don't create a new one if it is passed in
self.setupConfig()
self._grasp_point = None # stores the grasp point to be used in grasp3DLocation
self._cache = dict()
self._gripper = spartan.manipulation.gripper.Gripper.make_schunk_gripper()
self._poser_visualizer = PoserVisualizer.make_default()
self.poser_result = None
self._object_manipulation = None
self._category_manip = None # can be assigned later as needed
self._shoe_manipulation_counter = 0
filename = os.path.join(spartanUtils.getSpartanSourceDir(), 'src/catkin_projects/station_config/RLG_iiwa_1/stored_poses.yaml')
self._stored_poses_director = spartanUtils.getDictFromYamlFilename(filename)
if USING_DIRECTOR:
self.taskRunner = TaskRunner()
self.taskRunner.callOnThread(self.setup)
else:
self.setup()
self.debugMode = False
if self.debugMode:
print "\n\n----------WARNING GRASP SUPERVISOR IN DEBUG MODE----------\n"
# if self.debugMode:
# self.pointCloudListMsg = GraspSupervisor.getDefaultPointCloudListMsg()
def reloadParams(self):
self.graspingParams = spartanUtils.getDictFromYamlFilename(self.graspingParamsFile)
def setup(self):
self.setupSubscribers()
self.setupPublishers()
self.setupTF()
self.setupROSActions()
self.gripperDriver = SchunkDriver()
self.setup_visualization()
def _clear_cache(self):
"""
Clears our local cache of variables
:return:
"""
self._cache = dict()
def setupDirector(self):
self.taskRunner.callOnThread(self.setup)
def setupConfig(self):
self.config = dict()
self.config['base_frame_id'] = "base"
self.config['end_effector_frame_id'] = "iiwa_link_ee"
self.config['pick_up_distance'] = 0.25 # distance to move above the table after grabbing the object
self.config["sleep_time_for_sensor_collect"] = 0.1
self.config['scan'] = dict()
self.config['scan']['pose_list'] = ['scan_left_close','scan_above_table','scan_right']
self.config['scan']['joint_speed'] = 45
self.config['grasp_speed'] = 20
normal_speed = 30
self.config['speed'] = dict()
self.config['speed']['stow'] = normal_speed
self.config['speed']['pre_grasp'] = normal_speed
self.config['speed']['grasp'] = 10
self.config['home_pose_name'] = 'above_table_pre_grasp'
self.config['grasp_nominal_direction'] = np.array([1, 0, 0]) # x forwards
self.config['grasp_to_ee'] = dict()
self.config["object_interaction"] = dict()
self.config["object_interaction"]["speed"] = 10
self.config["object_interaction"]["rotate_speed"] = 30
self.config["object_interaction"]["pickup_distance"] = 0.15
# self.config["object_interaction"]["drop_distance_above_grasp"] = 0.035 # good for shoes
self.config["object_interaction"]["drop_distance_above_grasp"] = 0.002 # good for mugs
self.config["object_interaction"]["drop_location"] = [0.65, 0, 0.5] # z coordinate is overwritten later
self.graspToIiwaLinkEE = spartanUtils.transformFromPose(
self.graspingParams['gripper_palm_to_ee'])
self.iiwaLinkEEToGraspFrame = self.graspToIiwaLinkEE.GetLinearInverse()
self.gripper_fingertip_to_iiwa_link_ee = spartanUtils.transformFromPose(
self.graspingParams['gripper_fingertip_to_ee'])
self.T_gripper_fingertip__iiwa_link_ee = self.gripper_fingertip_to_iiwa_link_ee.GetLinearInverse()
pos = [-0.15, 0, 0]
quat = [1, 0, 0, 0]
self.preGraspToGraspTransform = transformUtils.transformFromPose(pos, quat)
def setupSubscribers(self):
self.pointCloudSubscriber = rosUtils.SimpleSubscriber(self.pointCloudTopic, sensor_msgs.msg.PointCloud2)
self.rgbImageSubscriber = rosUtils.SimpleSubscriber(self.rgbImageTopic, sensor_msgs.msg.Image)
self.depthImageSubscriber = rosUtils.SimpleSubscriber(self.depthImageTopic, sensor_msgs.msg.Image)
self.camera_info_subscriber = rosUtils.SimpleSubscriber(self.camera_info_topic, sensor_msgs.msg.CameraInfo)
self.pointCloudSubscriber.start()
self.rgbImageSubscriber.start()
self.depthImageSubscriber.start()
self.camera_info_subscriber.start()
self.clicked_point_subscriber = rosUtils.SimpleSubscriber("/clicked_point", geometry_msgs.msg.PointStamped,
self.on_clicked_point)
self.clicked_point_subscriber.start()
self.ggcnn_subscriber = rosUtils.SimpleSubscriber('ggcnn/out/command', std_msgs.msg.Float32MultiArray)
def setupPublishers(self):
"""
Sets up some ROS publishers
"""
self.rviz_marker_publisher = rospy.Publisher("/spartan_grasp/visualization_marker",
visualization_msgs.msg.Marker, queue_size=1)
self.rviz_marker_array_publisher = rospy.Publisher("/grasp_supervisor/visualization_marker_array",
visualization_msgs.msg.MarkerArray, queue_size=1)
self.grasp_pointcloud_publisher = rospy.Publisher("/grasp_supervisor/points", sensor_msgs.msg.PointCloud2,
queue_size=1)
def setup_visualization(self):
self._vis_container = om.getOrCreateContainer("grasp supervisor")
def on_clicked_point(self, clicked_point_msg):
"""
Visualizes the clicked point in rviz
"""
print "received a /clicked_point message... visualizing"
pos = clicked_point_msg.point
x, y, z = pos.x, pos.y, pos.z
marker = visualization_msgs.msg.Marker()
marker.header.frame_id = "base"
marker.header.stamp = rospy.Time.now()
marker.ns = "clicked_point"
marker.id = 0
marker.type = visualization_msgs.msg.Marker.SPHERE
marker.action = visualization_msgs.msg.Marker.ADD
marker.pose.position.x = x
marker.pose.position.y = y
marker.pose.position.z = z
marker.pose.orientation.x = 0.0
marker.pose.orientation.y = 0.0
marker.pose.orientation.z = 0.0
marker.pose.orientation.w = 1.0
marker.scale.x = 0.03
marker.scale.y = 0.03
marker.scale.z = 0.03
marker.color.a = 1.0
marker.color.r = 1.0
marker.color.g = 0.0
marker.color.b = 0.0
# hack to get around director funny business
for i in xrange(0, 5):
self.rviz_marker_publisher.publish(marker)
rospy.sleep(0.02)
def get_clicked_point(self):
"""
Returns the stored clicked point. If there is none it raises an error
rtype: geometry_msgs.Point
"""
lastMsg = self.clicked_point_subscriber.lastMsg
if lastMsg is None:
raise ValueError("No /clicked_point messages found.")
return lastMsg.point
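    # Hypothetical usage sketch: publish a point from RViz's "Publish Point"
    # tool first, then:
    #   point = supervisor.get_clicked_point()
    #   print point.x, point.y, point.z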
def setupROSActions(self):
actionName = '/spartan_grasp/GenerateGraspsFromPointCloudList'
self.generate_grasps_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.GenerateGraspsFromPointCloudListAction)
actionName = '/spartan_grasp/Grasp3DLocation'
self.grasp_3D_location_client = actionlib.SimpleActionClient(actionName,
spartan_grasp_msgs.msg.Grasp3DLocationAction)
findBestBatchActionName = '/FindBestMatch'
self.find_best_match_client = actionlib.SimpleActionClient(findBestBatchActionName,
pdc_ros_msgs.msg.FindBestMatchAction)
poser_action_name = '/Poser'
self.poser_client = actionlib.SimpleActionClient(poser_action_name,
pdc_ros_msgs.msg.DeformableRegistrationAction)
category_manipulation_name = "/CategoryManipulation"
self.category_manip_client = actionlib.SimpleActionClient(category_manipulation_name, pdc_ros_msgs.msg.CategoryManipulationAction)
action_name = "/KeypointDetection"
self.keypoint_detection_client = actionlib.SimpleActionClient(action_name, pdc_ros_msgs.msg.KeypointDetectionAction)
action_name = "/PoseEstimation"
self.pose_estimation_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.EstimatePoseAction)
action_name = "/SaveRGBD"
self.save_RGBD_client = actionlib.SimpleActionClient(action_name,
pdc_ros_msgs.msg.KeypointDetectionAction)
def setupTF(self):
if self.tfBuffer is None:
self.tfBuffer = tf2_ros.Buffer()
self.tfListener = tf2_ros.TransformListener(self.tfBuffer)
self.tfBroadcaster = tf2_ros.TransformBroadcaster()
def getDepthOpticalFrameToWorldTransform(self):
depth_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.depthOpticalFrameName,
rospy.Time(0))
return depth_optical_frame_to_world
def get_transform(self, from_name, to_name, ros_time=None):
if ros_time is None:
ros_time = rospy.Time(0)
transform_stamped_msg = self.tfBuffer.lookup_transform(to_name, from_name, ros_time)
# convert to vtkTransform
pos, quat = rosUtils.poseFromROSTransformMsg(transform_stamped_msg.transform)
return pos, quat
def getRgbOpticalFrameToWorldTransform(self, time=None):
"""
:param time:
:type time:
:return: geometry_msgs/TransformStamped
:rtype:
"""
if time is None:
time = rospy.Time(0)
rgb_optical_frame_to_world = self.tfBuffer.lookup_transform("base", self.rgbOpticalFrameName,
time)
return rgb_optical_frame_to_world
def capturePointCloudAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
"""
Captures the current PointCloud2 from the sensor. Also records the pose of camera frame.
"""
# sleep so transforms can update
msg = spartan_grasp_msgs.msg.PointCloudWithTransform()
msg.header.stamp = rospy.Time.now()
msg.camera_origin.x = cameraOrigin[0]
msg.camera_origin.y = cameraOrigin[1]
msg.camera_origin.z = cameraOrigin[2]
msg.point_cloud_to_base_transform = self.getDepthOpticalFrameToWorldTransform()
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
self.testData = msg # for debugging
return msg
def captureRgbdAndCameraTransform(self, cameraOrigin=[0, 0, 0]):
# sleep so transforms can update
msg = pdc_ros_msgs.msg.RGBDWithPose()
msg.header.stamp = rospy.Time.now()
msg.camera_pose = self.getRgbOpticalFrameToWorldTransform()
msg.rgb_image = self.rgbImageSubscriber.waitForNextMessage()
msg.depth_image = self.depthImageSubscriber.waitForNextMessage()
# maybe be careful about rostime here
msg.point_cloud = self.pointCloudSubscriber.waitForNextMessage()
msg.point_cloud_pose = self.getDepthOpticalFrameToWorldTransform()
return msg
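    # Hypothetical usage sketch: capture one RGBD frame together with the
    # camera and point-cloud poses at that moment.
    #   msg = supervisor.captureRgbdAndCameraTransform()
    #   print msg.camera_pose.transform.translation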
def moveHome(self, speed=None):
rospy.loginfo("moving home")
if speed is None:
speed = self.graspingParams['speed']['nominal']
homePose = self.graspingParams[self.state.graspingLocation]['poses']['scan_above_table']
self.robotService.moveToJointPosition(homePose,
maxJointDegreesPerSecond=speed)
def getStowPose(self):
stow_location = self.state.stowLocation
params = self.graspingParams[stow_location]
return params['poses']['stow']
# scans to several positions
def collectSensorData(self, saveToBagFile=False, **kwargs):
"""
Collects PointCloud Messages, also RGB and Depth images.
Writes the result to two class variables
- self.pointCloudListMsg
- self.listOfRgbdWithPose
also returns these two values
"""
self.moveHome()
rospy.loginfo("collecting sensor data")
graspLocationData = self.graspingParams[self.state.graspingLocation]
pointCloudListMsg = spartan_grasp_msgs.msg.PointCloudList()
pointCloudListMsg.header.stamp = rospy.Time.now()
data = dict()
pose_list = graspLocationData['scan_pose_list']
listOfRgbdWithPoseMsg = []
for poseName in pose_list:
rospy.loginfo("moving to pose = " + poseName)
joint_positions = graspLocationData['poses'][poseName]
self.robotService.moveToJointPosition(joint_positions,
maxJointDegreesPerSecond=self.config['scan']['joint_speed'])
rospy.sleep(self.config["sleep_time_for_sensor_collect"])
pointCloudWithTransformMsg = self.capturePointCloudAndCameraTransform()
pointCloudListMsg.point_cloud_list.append(pointCloudWithTransformMsg)
data[poseName] = pointCloudWithTransformMsg
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg.append(rgbdWithPoseMsg)
self.sensorData = data
self.pointCloudListMsg = pointCloudListMsg
self.listOfRgbdWithPoseMsg = listOfRgbdWithPoseMsg
if saveToBagFile:
self.saveSensorDataToBagFile(pointCloudListMsg=pointCloudListMsg, **kwargs)
return pointCloudListMsg, listOfRgbdWithPoseMsg
def findBestBatch(self):
"""
This function will:
- collect a small handful of RGBDWithPose msgs
- call the FindBestMatch service (a service of pdc-ros)
- return what was found from FindBestMatch
"""
self.moveHome()
_, listOfRgbdWithPoseMsg = self.collectSensorData()
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for find best match server")
self.find_best_match_client.wait_for_server()
goal = pdc_ros_msgs.msg.FindBestMatchGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting best match from server")
self.find_best_match_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for find best match result")
self.find_best_match_client.wait_for_result()
result = self.find_best_match_client.get_result()
rospy.loginfo("received best match result")
self.best_match_result = result
if result.match_found:
print "match found"
print "location:", result.best_match_location
else:
print "NO MATCH FOUND"
return result
def run_poser(self):
"""
This function will:
- collect a small handful of RGBDWithPose msgs
- call the FindBestMatch service (a service of pdc-ros)
- return what was found from FindBestMatch
"""
# self.moveHome()
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
listOfRgbdWithPoseMsg = [rgbdWithPoseMsg]
self.list_rgbd_with_pose_msg = listOfRgbdWithPoseMsg
# request via a ROS Action
rospy.loginfo("waiting for poser server")
self.poser_client.wait_for_server()
rospy.loginfo("connected to poser server")
goal = pdc_ros_msgs.msg.DeformableRegistrationGoal()
goal.rgbd_with_pose_list = listOfRgbdWithPoseMsg
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
rospy.loginfo("requesting registration from poser")
self.poser_client.send_goal(goal)
self.moveHome()
rospy.loginfo("waiting for poser result")
self.poser_client.wait_for_result()
result = self.poser_client.get_result()
state = self.poser_client.get_state()
rospy.loginfo("received poser result")
print("result:\n", result)
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("Poser failed")
self.poser_result = result
self._cache['poser_result'] = result
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.taskRunner.callOnMain(self.visualize_poser_result)
def run_keypoint_detection(self, wait_for_result=True, move_to_stored_pose=True, clear_state=True):
"""
Runs keypoint detection using ManKey in pdc-ros. Note that this clears the cache
:return:
:rtype:
"""
if clear_state:
self._clear_cache()
self.state.clear()
if move_to_stored_pose:
CMT = CategoryManipulationType
q = self._stored_poses_director["General"]["home"] # for mugs
if MANIP_TYPE in [CMT.SHOE_ON_RACK, CMT.SHOE_ON_TABLE]:
q = self._stored_poses_director['General']['center_back']
else: # basically all mugs
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=self.graspingParams['speed']['fast'])
rgbdWithPoseMsg = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'] = []
self.state.cache['rgbd_with_pose_list'].append(rgbdWithPoseMsg)
# request via a ROS Action
rospy.loginfo("waiting for KeypointDetection server")
self.keypoint_detection_client.wait_for_server()
rospy.loginfo("connected to KeypointDetection server")
goal = pdc_ros_msgs.msg.KeypointDetectionGoal()
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
goal.camera_info = self.camera_info_subscriber.waitForNextMessage()
if EXPERIMENT_MODE:
goal.output_dir = "mankey_experiments/%s" %(spartanUtils.get_current_YYYY_MM_DD_hh_mm_ss())
rospy.loginfo("requesting action from KeypointDetection server")
self.keypoint_detection_client.send_goal(goal)
self.state.set_status("ABOVE_TABLE")
if wait_for_result:
self.wait_for_keypoint_detection_result()
def wait_for_keypoint_detection_result(self):
"""
Wait for keypoint detection result, save it to cache
"""
rospy.loginfo("waiting for KeypointDetection result")
self.keypoint_detection_client.wait_for_result()
result = self.keypoint_detection_client.get_result()
state = self.keypoint_detection_client.get_state()
rospy.loginfo("received KeypointDetection result")
print "result:\n", result
self.keypoint_detection_result = result
succeeded = (state == GoalStatus.SUCCEEDED)
if not succeeded:
rospy.loginfo("KeypointDetection failed")
result_dict = dict()
result_dict['result'] = result
result_dict['output_dir'] = result.output_dir
result_dict['state'] = state
result_dict['succeeded'] = succeeded
result_dict['type'] = "mankey"
self._cache["keypoint_detection_result"] = result_dict
self.state._cache["keypoint_detection_result"] = result_dict
return result_dict
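    # Hypothetical usage sketch of the result dict cached above:
    #   res = supervisor.wait_for_keypoint_detection_result()
    #   if res['succeeded']:
    #       print res['output_dir']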
def check_keypoint_detection_succeeded(self):
"""
Checks whether keypoint detection succeeded or not
:return:
:rtype:
"""
# you should have run keypoint detection before this
keypoint_detection_result = self.state.cache['keypoint_detection_result']
if keypoint_detection_result["state"] == GoalStatus.SUCCEEDED:
return True
else:
print("keypoint detection failed, ABORTING")
return False
def check_category_goal_estimation_succeeded(self):
"""
Returns a bool as to whether category goal estimation succeeded or not
:return:
:rtype:
"""
state = self.state.cache['category_manipulation_goal']['state']
if state == GoalStatus.SUCCEEDED:
return True
else:
print("category goal estimation failed, ABORTING")
return False
def estimate_mug_rack_pose(self):
"""
:return:
:rtype:
"""
# fusion_params_file = os.path.join(spartanUtils.getSpartanSourceDir(), "src/catkin_projects/station_config/RLG_iiwa_1/fusion/fusion_params.yaml")
#
#
# fusion_params = spartanUtils.getDictFromYamlFilename(fusion_params_file)
# bbox_min = np.array(fusion_params['left']['bbox_min'])
# bbox_min[2] += 0.05 # be conservative on where bottom of table is
# bbox_max = np.array(fusion_params['left']['bbox_max'])
bbox_min = np.array([0.07001, 0.49, 0.01026])
bbox_max = np.array([0.47195, 0.85201, 0.75])
rgbd_with_pose_list = []
# move to pose 1, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# move to pose 2, capture RGBD
q = self._stored_poses_director["left_table"]["look_at_rack_2"]
speed = self.graspingParams["speed"]["fast"]
self.robotService.moveToJointPosition(q, maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
rgbd_with_pose_list.append(rgbd_with_pose)
# convert to VTK poly data and crop
d = DebugData()
for msg in rgbd_with_pose_list:
pointcloud_numpy = DirectorROSVisualizer.numpy_from_pointcloud2_msg(msg.point_cloud)
pointcloud_vtk = vnp.getVtkPolyDataFromNumpyPoints(pointcloud_numpy)
T_world_pointcloud = ros_numpy.numpify(msg.point_cloud_pose.transform)
T_world_pointcloud_vtk = transformUtils.getTransformFromNumpy(T_world_pointcloud)
pointcloud_vtk = filterUtils.transformPolyData(pointcloud_vtk, T_world_pointcloud_vtk)
d.addPolyData(pointcloud_vtk)
pointcloud = d.getPolyData()
print "pointcloud.GetNumberOfPoints()", pointcloud.GetNumberOfPoints()
# crop
transform = vtk.vtkTransform()
bounds = np.zeros([2,3])
bounds[0,:] = bbox_min
bounds[1,:] = bbox_max
print "bounds", bounds
cropped_pointcloud = segmentation.cropToBounds(pointcloud, transform, bounds)
print "cropped_pointcloud.GetNumberOfPoints()", cropped_pointcloud.GetNumberOfPoints()
# visualize it
def vis_function():
print "visualizing pointcloud"
vis.showPolyData(pointcloud, "pointcloud")
vis.showPolyData(cropped_pointcloud, "Mug rack pointcloud")
self.mug_rack_pointcloud = cropped_pointcloud
# not working for some reason
print "visualizing"
self.taskRunner.callOnMain(vis_function)
return
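        # NOTE: the early return above makes the remainder of this method
        # unreachable; it looks like work-in-progress kept for reference.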
rgbd_with_pose = pdc_ros_msgs.msg.RGBDWithPose()
# N x 3
cropped_pointcloud_numpy = vnp.getNumpyFromVtk(cropped_pointcloud)
print "cropped_pointcloud_numpy.shape", cropped_pointcloud_numpy.shape
# save numpy to file
save_file = "/home/manuelli/sandbox/spartan/pointcloud.npy"
np.save(save_file, cropped_pointcloud_numpy)
return
# it's already in world frame
rgbd_with_pose.point_cloud = DirectorROSVisualizer.pointcloud2_msg_from_numpy(cropped_pointcloud_numpy)
# convert it back to ROS msg
goal = pdc_ros_msgs.msg.EstimatePoseGoal()
goal.rgbd_with_pose_list.append(rgbd_with_pose)
T_world_rack_vtk = self._category_manip.mug_rack_vis_obj.getChildFrame().transform
T_world_rack = transformUtils.getNumpyFromTransform(T_world_rack_vtk)
goal.T_init = ros_numpy.msgify(geometry_msgs.msg.Pose, T_world_rack)
# send out service call
self.pose_estimation_client.wait_for_server()
self.pose_estimation_client.send_goal(goal)
# wait for result
self.pose_estimation_client.wait_for_result()
result = self.pose_estimation_client.get_result()
T_world_rack_estimated = ros_numpy.numpify(result.T_world_model)
T_world_rack_estimated_vtk = transformUtils.getTransformFromNumpy(T_world_rack_estimated)
self._category_manip.mug_rack_vis_obj.getChildFrame().copyFrame(T_world_rack_estimated_vtk)
def run_category_manipulation_goal_estimation(self, wait_for_result=True, capture_rgbd=True):
"""
Calls the CategoryManipulation service of pdc-ros
which is provided by category_manip_server.py.
Uses the keypoint detection result from either
`run_poser` or `run_keypoint_detection`
:return: bool
:rtype:
"""
if not self.check_keypoint_detection_succeeded():
return False
keypoint_detection_result = self.state.cache['keypoint_detection_result']
# don't specify poser output dir for now
goal = pdc_ros_msgs.msg.CategoryManipulationGoal()
goal.output_dir = keypoint_detection_result['output_dir']
goal.keypoint_detection_type = keypoint_detection_result['type']
if capture_rgbd:
self.moveHome()
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
if 'rgbd_with_pose_list' in self.state.cache:
goal.rgbd_with_pose_list = self.state.cache['rgbd_with_pose_list']
if MANIP_TYPE == CategoryManipulationType.SHOE_ON_RACK:
print("applying T_adjust")
print("self._shoe_manipulation_counter", self._shoe_manipulation_counter)
goal.apply_T_adjust = True
pos = np.array([self.graspingParams["shoe_offset"], 0, 0]) * self._shoe_manipulation_counter
quat = [1,0,0,0]
T_adjust_vtk = transformUtils.transformFromPose(pos, quat)
T_adjust = transformUtils.getNumpyFromTransform(T_adjust_vtk)
goal.T_adjust = ros_numpy.msgify(geometry_msgs.msg.Pose, T_adjust)
else:
goal.apply_T_adjust = False
rospy.loginfo("waiting for CategoryManip server")
self.category_manip_client.wait_for_server()
rospy.loginfo("connected to CategoryManip server")
self.category_manip_client.send_goal(goal)
if wait_for_result:
self.wait_for_category_manipulation_goal_result()
return True
def wait_for_category_manipulation_goal_result(self):
"""
Waits for category manipulation goal result
"""
print("waiting for category manipulation result")
self.category_manip_client.wait_for_result()
result = self.category_manip_client.get_result()
state = self.category_manip_client.get_state()
T_goal_obs = ros_numpy.numpify(result.T_goal_obs)
print "T_goal_obs:\n", T_goal_obs
T_goal_obs_vtk = transformUtils.getTransformFromNumpy(T_goal_obs)
print transformUtils.poseFromTransform(T_goal_obs_vtk)
self.state.cache['category_manipulation_goal'] = dict()
self.state.cache['category_manipulation_goal']['result'] = result
self.state.cache['category_manipulation_goal']["T_goal_obs"] = T_goal_obs_vtk
self.state.cache['category_manipulation_goal']['state'] = state
self.state.cache['category_manipulation_goal']["type"] = CategoryManipulationType.from_string(result.category_manipulation_type)
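    # Hypothetical usage sketch of the cache entry populated above:
    #   goal = self.state.cache['category_manipulation_goal']
    #   if goal['state'] == GoalStatus.SUCCEEDED:
    #       T_goal_obs_vtk = goal['T_goal_obs']  # vtkTransform of the goal pose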
def run_mug_shelf_3D_pipeline(self):
"""
Runs entire pipeline for mug shelf 3D
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
super_fast_speed = self.graspingParams['speed']['fast']
# q = self._stored_poses_director["General"]["home"]
# q = self._stored_poses_director["mug"]["image_capture_for_mug_shelf"]
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
# run keypoint detection
# move to center back to capture another RGBD image
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=super_fast_speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_shelf_manipulation()
if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_mug_shelf()
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_mug_on_rack_pipeline(self, side_view=False):
"""
Runs entire pipeline for mug on rack
:return:
:rtype:
"""
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
q = self._stored_poses_director["General"]["home"]
if side_view:
print "\nusing side view\n"
q = self._stored_poses_director["General"]["center_back"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
# move to center back to capture another RGBD image
q = self._stored_poses_director["General"]["center_back"]
if side_view:
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
rgbd_with_pose = self.captureRgbdAndCameraTransform()
self.state.cache['rgbd_with_pose_list'].append(rgbd_with_pose)
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_mug_on_rack_manipulation()
if not code:
self.state.set_status("FAILED")
return False
if EXPERIMENT_MODE:
output_dir = self.state.cache['keypoint_detection_result']['output_dir']
print "\n\n", os.path.split(output_dir)[1]
def run_shoe_on_rack_pipeline(self):
"""
Runs entire pipeline for shoe on rack
:return:
:rtype:
"""
if EXPERIMENT_MODE:
self._shoe_manipulation_counter = 0 # for testing
self.state.clear()
self._clear_cache()
# move home
speed = self.graspingParams['speed']['fast']
# q = self._stored_poses_director["General"]["center_back"]
q = self._stored_poses_director["General"]["home"]
self.robotService.moveToJointPosition(q,
maxJointDegreesPerSecond=speed)
# run keypoint detection
self.run_keypoint_detection(wait_for_result=False, move_to_stored_pose=False, clear_state=False)
self.wait_for_keypoint_detection_result()
if not self.check_keypoint_detection_succeeded():
self.state.set_status("FAILED")
return False
# run category manip
code = self.run_category_manipulation_goal_estimation(capture_rgbd=False)
if not code:
self.state.set_status("FAILED")
return False
self.wait_for_category_manipulation_goal_result()
if not self.check_category_goal_estimation_succeeded():
self.state.set_status("PLANNING_FAILED")
return False
# run the manipulation
# need safety checks in there before running autonomously
code = self.run_shoe_rack_manipulation()
if not code:
self.state.set_status("FAILED")
return False
# if the place was successful then retract
self.retract_from_shoe_rack()
if EXPERIMENT_MODE:
print "\n\n", self.state.cache['keypoint_detection_result']['output_dir']
def run_manipulate_object(self, debug=False):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
if debug:
self._object_manipulation = ObjectManipulation()
self._object_manipulation.assign_defaults()
self._object_manipulation.compute_transforms()
return
self.moveHome()
grasp_found, grasp_data = self.request_spartan_grasp(clear_state=False)
if not grasp_found:
print "no grasp found, returning\n"
return False
# execute the grasp
object_in_gripper = self.execute_grasp(self.state.grasp_data, close_gripper=True, use_cartesian_plan=True)
print "object_in_gripper:", object_in_gripper
T_goal_obs = self.state.cache['category_manipulation_T_goal_obs']
T_W_G = self.state.cache['gripper_frame_at_grasp']
self._object_manipulation = ObjectManipulation(T_goal_object=T_goal_obs, T_W_G=T_W_G)
self._object_manipulation.grasp_data = self.state.grasp_data
self._object_manipulation.compute_transforms()
self.taskRunner.callOnMain(self._object_manipulation.visualize)
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
if not object_in_gripper:
# open the gripper and back away
self.gripperDriver.send_open_gripper_set_distance_from_current()
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
return False
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# place the object
grasp_data_place = self._object_manipulation.get_place_grasp_data()
self.execute_place(grasp_data_place)
# open the gripper and back away
pre_grasp_pose = self.state.cache['pre_grasp_ik_response'].joint_state.position
pickup_speed = self.graspingParams['speed']['pickup']
self.gripperDriver.send_open_gripper_set_distance_from_current()
# pickup the object
self.robotService.moveToJointPosition(pre_grasp_pose,
maxJointDegreesPerSecond=
pickup_speed)
# move home
self.moveHome()
def run_shoe_rack_manipulation(self, debug=False, push_in_distance=0.00):
"""
Runs the object manipulation code. Will put the object into the
specified target pose from `run_category_manipulation_goal_estimation`
:return:
"""
print("\n\n--- Running Shoe Manipulation-------\n\n")
# self.taskRunner.callOnMain(self._poser_visualizer.visualize_result)
if not self.check_category_goal_estimation_succeeded():
return False
# check that we really are doing shoe on rack
category_manipulation_type = self.state.cache['category_manipulation_goal']['type']
assert category_manipulation_type == CategoryManipulationType.SHOE_ON_RACK
speed = self.graspingParams['speed']['fast']
self.moveHome(speed=speed)
result = self.state.cache['category_manipulation_goal']['result']
T_W_fingertip = ros_numpy.numpify(result.T_world_gripper_fingertip)
T_W_fingertip_vtk = transformUtils.getTransformFromNumpy(T_W_fingertip)
grasp_data = GraspData.from_gripper_fingertip_frame(T_W_fingertip)
grasp_data.gripper.params["hand_inner_diameter"] = result.gripper_width
grasp_data.gripper.params["hand_inner_diameter"] = 0.07
self.state.grasp_data = grasp_data
# rotate the grasp to align with nominal
params = self.getParamsForCurrentLocation()
grasp_z_axis_nominal = np.array(params['grasp']['grasp_nominal_direction'])
grasp_data.rotate_grasp_frame_to_nominal(grasp_z_axis_nominal)
def vis_function():
vis.updateFrame(T_W_fingertip_vtk, "gripper fingertip frame", scale=0.15, parent=self._vis_container)
vis.updateFrame(grasp_data.grasp_frame, "grasp frame", scale=0.15, parent=self._vis_container)
self.visualize_grasp(grasp_data)
self.taskRunner.callOnMain(vis_function)
# execute the grasp
force_threshold_magnitude = 30
object_in_gripper = self.execute_grasp(grasp_data, close_gripper=True, use_cartesian_plan=True, force_threshold_magnitude=force_threshold_magnitude, push_in_distance=0.04, ee_speed_m_s=0.1)
if not object_in_gripper:
print("gr
|
bd179fda18551d4f3d8a4d695a9da38ee607ef1d
|
Python
|
<|begin_of_text|>import datetime
import json
from dateutil import parser
import mock
from python_http_client.exceptions import ForbiddenError
from rdr_service import clock, config
from rdr_service.api_util import open_cloud_file
from rdr_service.clock import FakeClock
from rdr_service.dao.database_utils import format_datetime
from rdr_service.dao.genomics_dao import GenomicGcDataFileDao, GenomicGCValidationMetricsDao, GenomicIncidentDao, \
GenomicSetMemberDao, UserEventMetricsDao, GenomicJobRunDao, GenomicResultWithdrawalsDao, \
GenomicMemberReportStateDao, GenomicAppointmentEventMetricsDao, GenomicAppointmentEventDao, GenomicResultViewedDao, \
GenomicInformingLoopDao, GenomicAppointmentEventNotifiedDao, GenomicDefaultBaseDao
from rdr_service.dao.message_broker_dao import MessageBrokenEventDataDao
from rdr_service.genomic_enums import GenomicIncidentCode, GenomicJob, GenomicWorkflowState, GenomicSubProcessResult, \
GenomicSubProcessStatus, GenomicManifestTypes, GenomicQcStatus, GenomicReportState
from rdr_service.genomic.genomic_job_components import GenomicFileIngester
from rdr_service.genomic.genomic_job_controller import GenomicJobController
from rdr_service.model.genomics import GenomicGcDataFile, GenomicIncident, GenomicSetMember, GenomicGCValidationMetrics,\
GenomicGCROutreachEscalationNotified
from rdr_service.offline.genomics import genomic_pipeline, genomic_cvl_pipeline
from rdr_service.participant_enums import WithdrawalStatus
from tests import test_data
from tests.genomics_tests.test_genomic_utils import create_ingestion_test_file
from tests.helpers.unittest_base import BaseTestCase
class GenomicJobControllerTest(BaseTestCase):
def setUp(self):
super(GenomicJobControllerTest, self).setUp()
self.data_file_dao = GenomicGcDataFileDao()
self.event_data_dao = MessageBrokenEventDataDao()
self.incident_dao = GenomicIncidentDao()
self.member_dao = GenomicSetMemberDao()
self.metrics_dao = GenomicGCValidationMetricsDao()
self.user_event_metrics_dao = UserEventMetricsDao()
self.job_run_dao = GenomicJobRunDao()
self.report_state_dao = GenomicMemberReportStateDao()
self.appointment_event_dao = GenomicAppointmentEventDao()
self.appointment_metrics_dao = GenomicAppointmentEventMetricsDao()
def test_incident_with_long_message(self):
"""Make sure the length of incident messages doesn't cause issues when recording them"""
incident_message = "1" * (GenomicIncident.message.type.length + 20)
mock_slack_handler = mock.MagicMock()
job_controller = GenomicJobController(job_id=1)
job_controller.genomic_alert_slack = mock_slack_handler
job_controller.create_incident(message=incident_message, slack=True)
# Double check that the incident was saved successfully, with part of the message
incident: GenomicIncident = self.session.query(GenomicIncident).one()
self.assertTrue(incident_message.startswith(incident.message))
# Make sure Slack received the full message
mock_slack_handler.send_message_to_webhook.assert_called_with(
message_data={
'text': incident_message
}
)
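        # Minimal sketch of the truncation behavior exercised above (an
        # assumption about create_incident, not taken from its source):
        #   max_len = GenomicIncident.message.type.length
        #   assert incident.message == incident_message[:max_len]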
def test_gvcf_files_ingestion(self):
job_controller = GenomicJobController(job_id=38)
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
file_path_md5 = "Wgs_sample_raw_data/SS_VCF_research/" \
"BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz.md5sum"
full_path = f'{bucket_name}/{file_path}'
full_path_md5 = f'{bucket_name}/{file_path_md5}'
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
job_controller.ingest_data_files_into_gc_metrics(file_path_md5, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfMd5Path)
self.assertEqual(metrics.gvcfMd5Path, full_path_md5)
job_controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
metrics = self.metrics_dao.get_metrics_by_member_id(gen_member.id)
self.assertIsNotNone(metrics.gvcfPath)
self.assertEqual(metrics.gvcfPath, full_path)
def test_gvcf_files_ingestion_create_incident(self):
bucket_name = "test_bucket"
file_path = "Wgs_sample_raw_data/SS_VCF_research/BCM_A100153482_21042005280_SIA0013441__1.hard-filtered.gvcf.gz"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="111111111",
sampleId="222222222222",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=gen_job_run.id,
startTime=clock.CLOCK.now(),
filePath='/test_file_path',
bucketName=bucket_name,
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
with GenomicJobController(GenomicJob.INGEST_DATA_FILES) as controller:
controller.ingest_data_files_into_gc_metrics(file_path, bucket_name)
incident = self.incident_dao.get(1)
self.assertIsNotNone(incident)
self.assertEqual(incident.code, GenomicIncidentCode.UNABLE_TO_FIND_METRIC.name)
self.assertEqual(incident.data_file_path, file_path)
self.assertEqual(incident.message, 'INGEST_DATA_FILES: Cannot find '
'genomics metric record for sample id: '
'21042005280')
def test_accession_data_files(self):
test_bucket_baylor = "fake-data-bucket-baylor"
test_idat_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01_Grn.idat"
test_vcf_file = "fake-data-bucket-baylor/Genotyping_sample_raw_data/204027270091_R02C01.vcf.gz"
test_cram_file = "fake-data-bucket-baylor/Wgs_sample_raw_data/" \
"CRAMs_CRAIs/BCM_A100134256_21063006771_SIA0017196_1.cram"
test_files = [test_idat_file, test_vcf_file, test_cram_file]
test_time = datetime.datetime(2021, 7, 9, 14, 1, 1)
# run job controller method on each file
with clock.FakeClock(test_time):
for file_path in test_files:
with GenomicJobController(GenomicJob.ACCESSION_DATA_FILES) as controller:
controller.accession_data_files(file_path, test_bucket_baylor)
inserted_files = self.data_file_dao.get_all()
# idat
expected_idat = GenomicGcDataFile(
id=1,
created=test_time,
modified=test_time,
file_path=test_idat_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01_Grn.idat',
file_type='Grn.idat',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# vcf
expected_vcf = GenomicGcDataFile(
id=2,
created=test_time,
modified=test_time,
file_path=test_vcf_file,
gc_site_id='jh',
bucket_name='fake-data-bucket-baylor',
file_prefix='Genotyping_sample_raw_data',
file_name='204027270091_R02C01.vcf.gz',
file_type='vcf.gz',
identifier_type='chipwellbarcode',
identifier_value='204027270091_R02C01',
ignore_flag=0,
)
# cram
expected_cram = GenomicGcDataFile(
id=3,
created=test_time,
modified=test_time,
file_path=test_cram_file,
gc_site_id='bcm',
bucket_name='fake-data-bucket-baylor',
file_prefix='Wgs_sample_raw_data/CRAMs_CRAIs',
file_name='BCM_A100134256_21063006771_SIA0017196_1.cram',
file_type='cram',
identifier_type='sample_id',
identifier_value='21063006771',
ignore_flag=0,
)
# obj mapping
expected_objs = {
0: expected_idat,
1: expected_vcf,
2: expected_cram
}
# verify test objects match expectations
for i in range(3):
self.assertEqual(expected_objs[i].bucket_name, inserted_files[i].bucket_name)
self.assertEqual(expected_objs[i].created, inserted_files[i].created)
self.assertEqual(expected_objs[i].file_name, inserted_files[i].file_name)
self.assertEqual(expected_objs[i].file_path, inserted_files[i].file_path)
self.assertEqual(expected_objs[i].file_prefix, inserted_files[i].file_prefix)
self.assertEqual(expected_objs[i].file_type, inserted_files[i].file_type)
self.assertEqual(expected_objs[i].gc_site_id, inserted_files[i].gc_site_id)
self.assertEqual(expected_objs[i].id, inserted_files[i].id)
self.assertEqual(expected_objs[i].identifier_type, inserted_files[i].identifier_type)
self.assertEqual(expected_objs[i].identifier_value, inserted_files[i].identifier_value)
self.assertEqual(expected_objs[i].ignore_flag, inserted_files[i].ignore_flag)
self.assertEqual(expected_objs[i].metadata, inserted_files[i].metadata)
self.assertEqual(expected_objs[i].modified, inserted_files[i].modified)
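        # Illustrative note (an assumption about accession_data_files, not
        # verified against its source): the metadata fields compared above are
        # derived from the bucket/key structure, e.g. for the idat file:
        #   "Genotyping_sample_raw_data/204027270091_R02C01_Grn.idat"
        #   -> identifier_type 'chipwellbarcode', identifier_value '204027270091_R02C01'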
def test_updating_members_blocklists(self):
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
ids_should_be_updated = []
# for just created and wf state query and MATCHES criteria
for i in range(4):
ids_should_be_updated.append(
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='test_investigation_one' if i & 2!= 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='Y' if i & 2 == 0 else 'N'
).id
)
# for just created and wf state query and DOES NOT MATCH criteria
for i in range(2):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='aou_array',
genomicWorkflowState=GenomicWorkflowState.AW0,
ai_an='N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
# current config json in base_config.json
created_members = self.member_dao.get_all()
blocklisted = list(filter(lambda x: x.blockResults == 1 or x.blockResearch == 1, created_members))
self.assertEqual(sorted(ids_should_be_updated), sorted(obj.id for obj in blocklisted))
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW0)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in created_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# should NOT be RESEARCH/RESULTS blocked
self.assertTrue(all(
obj.blockResearch == 0 and obj.blockResearchReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in created_members if obj.genomeType == 'aou_array' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW0)
)
# clear current set member records
with self.member_dao.session() as session:
session.query(GenomicSetMember).delete()
run_result = self.job_run_dao.get(1)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
# for modified data query and MATCHES criteria
for i in range(4):
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType='test_investigation_one' if i & 2!= 0 else 'aou_wgs',
genomicWorkflowState=GenomicWorkflowState.AW1,
ai_an='Y' if i & 2 == 0 else 'N'
)
with GenomicJobController(GenomicJob.UPDATE_MEMBERS_BLOCKLISTS) as controller:
controller.update_members_blocklists()
modified_members = self.member_dao.get_all()
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'aian'
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should NOT be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 0 and obj.blockResultsReason is None
for obj in modified_members if obj.ai_an == 'Y' and obj.genomicWorkflowState == GenomicWorkflowState.AW1)
)
# should be RESEARCH blocked
self.assertTrue(all(
obj.blockResearch == 1 and obj.blockResearchReason is not None and obj.blockResearchReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
# should be RESULTS blocked
self.assertTrue(all(
obj.blockResults == 1 and obj.blockResultsReason is not None and obj.blockResultsReason == 'test_sample_swap'
for obj in modified_members if obj.genomeType == 'test_investigation_one' and obj.genomicWorkflowState ==
GenomicWorkflowState.AW1)
)
run_result = self.job_run_dao.get(2)
self.assertEqual(run_result.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(run_result.runResult, GenomicSubProcessResult.SUCCESS)
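# Ingest a user-events metrics CSV from a fake bucket and verify each
# participant's rows land in user_event_metrics tagged with the new job run.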
def test_ingest_user_metrics_file(self):
test_file = 'Genomic-Metrics-File-User-Events-Test.csv'
bucket_name = 'test_bucket'
sub_folder = 'user_events'
pids = []
file_ingester = GenomicFileIngester()
for _ in range(2):
pid = self.data_generator.create_database_participant()
pids.append(pid.participantId)
test_metrics_file = create_ingestion_test_file(
test_file,
bucket_name,
sub_folder)
test_file_path = f'{bucket_name}/{sub_folder}/{test_metrics_file}'
with open_cloud_file(test_file_path) as csv_file:
metrics_to_ingest = file_ingester._read_data_to_ingest(csv_file)
with GenomicJobController(GenomicJob.METRICS_FILE_INGEST) as controller:
controller.ingest_metrics_file(
metric_type='user_events',
file_path=test_file_path,
)
job_run_id = controller.job_run.id
metrics = self.user_event_metrics_dao.get_all()
for pid in pids:
file_metrics = list(filter(lambda x: int(x['participant_id'].split('P')[-1]) == pid, metrics_to_ingest['rows']))
participant_ingested_metrics = list(filter(lambda x: x.participant_id == pid, metrics))
self.assertEqual(len(file_metrics), len(participant_ingested_metrics))
self.assertTrue(all(obj.run_id == job_run_id for obj in participant_ingested_metrics))
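# PDR reconcile: each genomic table touched since the last run should trigger
# a rebuild cloud task; the mock below captures those task payloads.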
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_reconcile_pdr_data(self, mock_cloud_task):
# init new job run in __enter__
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
cloud_task_endpoint = 'rebuild_genomic_table_records_task'
first_run = self.job_run_dao.get_all()
self.assertEqual(mock_cloud_task.call_count, 1)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 1)
self.assertEqual(call_args[0].args[0]['table'], self.job_run_dao.model_type.__tablename__)
self.assertIsInstance(call_args[0].args[0]['ids'], list)
self.assertEqual(call_args[0].args[0]['ids'], [obj.id for obj in first_run])
self.assertEqual(call_args[0].args[1], cloud_task_endpoint)
participant = self.data_generator.create_database_participant()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
plus_ten = clock.CLOCK.now() + datetime.timedelta(minutes=10)
plus_ten = plus_ten.replace(microsecond=0)
with FakeClock(plus_ten):
for i in range(2):
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1
)
gen_processed_file = self.data_generator.create_database_genomic_file_processed(
runId=first_run[0].id,
startTime=clock.CLOCK.now(),
filePath=f'test_file_path_{i}',
bucketName='test_bucket',
fileName='test_file_name',
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=gen_processed_file.id
)
manifest = self.data_generator.create_database_genomic_manifest_file(
manifestTypeId=2,
filePath=f'test_file_path_{i}'
)
self.data_generator.create_database_genomic_manifest_feedback(
inputManifestFileId=manifest.id,
feedbackRecordCount=2
)
self.data_generator.create_database_genomic_user_event_metrics(
participant_id=participant.participantId,
event_name='test_event',
run_id=1,
)
self.data_generator.create_database_genomic_informing_loop(
message_record_id=1,
event_type='informing_loop_decision',
module_type='gem',
participant_id=participant.participantId,
decision_value='maybe_later',
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_database_genomic_cvl_past_due(
cvl_site_id='co',
email_notification_sent=0,
sample_id='sample_test',
results_type='hdr',
genomic_set_member_id=gen_member.id
)
self.data_generator.create_database_genomic_appointment(
message_record_id=i,
appointment_id=i,
event_type='appointment_scheduled',
module_type='hdr',
participant_id=participant.participantId,
event_authored_time=clock.CLOCK.now(),
source='Color',
appointment_timestamp=format_datetime(clock.CLOCK.now()),
appointment_timezone='America/Los_Angeles',
location='123 address st',
contact_number='17348675309',
language='en'
)
self.data_generator.create_database_genomic_member_report_state(
genomic_set_member_id=gen_member.id,
participant_id=participant.participantId,
module='gem',
genomic_report_state=GenomicReportState.GEM_RPT_READY,
event_authored_time=clock.CLOCK.now()
)
self.data_generator.create_genomic_result_viewed(
participant_id=participant.participantId,
event_type='result_viewed',
event_authored_time=clock.CLOCK.now(),
module_type='gem',
sample_id=gen_member.sampleId
)
# gets new records that were created with last job run from above
with GenomicJobController(GenomicJob.RECONCILE_PDR_DATA) as controller:
controller.reconcile_pdr_data()
affected_tables = [
'genomic_set',
'genomic_set_member',
'genomic_job_run',
'genomic_file_processed',
'genomic_gc_validation_metrics',
'genomic_manifest_file',
'genomic_manifest_feedback',
'genomic_informing_loop',
'genomic_cvl_results_past_due',
'user_event_metrics',
'genomic_member_report_state',
'genomic_result_viewed',
'genomic_appointment_event'
]
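# one cloud task per affected table from the second run, plus the single call
# already captured during the first run (the mock accumulates across runs)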
num_calls = len(affected_tables) + 1
self.assertEqual(mock_cloud_task.call_count, num_calls)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), num_calls)
mock_tables = set([obj[0][0]['table'] for obj in call_args])
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertEqual(sorted(mock_tables), sorted(affected_tables))
self.assertTrue(all(obj == cloud_task_endpoint for obj in mock_endpoint))
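# Retry manifest ingestion only when the raw tables show deltas: fully
# ingested AW1/AW2 manifests queue nothing; deleting the member and metrics
# rows creates deltas that queue one ingestion cloud task per manifest.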
@mock.patch('rdr_service.genomic.genomic_job_controller.GenomicJobController.execute_cloud_task')
def test_retry_manifest_ingestions_if_deltas(self, mock_cloud_task):
bucket_name = "test-bucket"
aw1_file_name = "AW1_wgs_sample_manifests/RDR_AoU_SEQ_PKG-2104-026571.csv"
aw1_manifest_path = f"{bucket_name}/{aw1_file_name}"
aw2_file_name = "AW2_wgs_data_manifests/RDR_AoU_SEQ_DataManifest_04092021.csv"
aw2_manifest_path = f"{bucket_name}/{aw2_file_name}"
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
# Create AW1 job_run
aw1_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# Create AW2 job_run
aw2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_INGESTION,
startTime=clock.CLOCK.now(),
endTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
# should have no data
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(3)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
# Create genomic_aw1_raw record
self.data_generator.create_database_genomic_aw1_raw(
file_path=aw1_manifest_path,
package_id="PKG-2104-026571",
biobank_id="A10001",
)
# Create genomic_aw2_raw record
self.data_generator.create_database_genomic_aw2_raw(
file_path=aw2_manifest_path,
biobank_id="A10001",
sample_id="100001",
biobankidsampleid="A10001_100001",
)
# Create AW1 genomic_manifest_file record
aw1_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW1,
filePath=aw1_manifest_path,
fileName=aw1_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW2 genomic_manifest_file record
aw2_manifest_file = self.data_generator.create_database_genomic_manifest_file(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
uploadDate=clock.CLOCK.now(),
manifestTypeId=GenomicManifestTypes.AW2,
filePath=aw2_manifest_path,
fileName=aw2_file_name,
bucketName=bucket_name,
recordCount=1,
rdrProcessingComplete=1,
rdrProcessingCompleteDate=clock.CLOCK.now(),
)
# Create AW1 file_processed
aw1_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw1_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw1_manifest_file.id,
filePath=f"/{aw1_manifest_path}",
bucketName=bucket_name,
fileName=aw1_file_name,
)
# Create AW2 file_processed
aw2_file_processed = self.data_generator.create_database_genomic_file_processed(
runId=aw2_job_run.id,
startTime=clock.CLOCK.now(),
genomicManifestFileId=aw2_manifest_file.id,
filePath=f"/{aw2_manifest_path}",
bucketName=bucket_name,
fileName=aw2_file_name,
)
# genomic_set_member for AW1
gen_member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
biobankId="100153482",
sampleId="21042005280",
genomeType="aou_wgs",
genomicWorkflowState=GenomicWorkflowState.AW1,
aw1FileProcessedId=aw1_file_processed.id
)
# genomic_gc_validation_metrics for AW1
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=gen_member.id,
genomicFileProcessedId=aw2_file_processed.id
)
# one AW1/AW2 with no deltas
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(4)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.NO_FILES)
self.assertEqual(mock_cloud_task.call_count, 0)
# empty tables resulting in deltas and cloud task calls
with self.member_dao.session() as session:
session.query(GenomicGCValidationMetrics).delete()
session.query(GenomicSetMember).delete()
with GenomicJobController(GenomicJob.RETRY_MANIFEST_INGESTIONS) as controller:
controller.retry_manifest_ingestions()
job_run = self.job_run_dao.get(5)
self.assertEqual(job_run.jobId, GenomicJob.RETRY_MANIFEST_INGESTIONS)
self.assertEqual(job_run.runStatus, GenomicSubProcessStatus.COMPLETED)
self.assertEqual(job_run.runResult, GenomicSubProcessResult.SUCCESS)
# one AW1/AW2 with deltas
self.assertEqual(mock_cloud_task.call_count, 2)
call_args = mock_cloud_task.call_args_list
self.assertEqual(len(call_args), 2)
cloud_task_endpoint = ['ingest_aw1_manifest_task', 'ingest_aw2_manifest_task']
mock_endpoint = [obj[0][1] for obj in call_args]
self.assertTrue(all(obj in cloud_task_endpoint for obj in mock_endpoint))
mock_buckets = set([obj[0][0]['bucket_name'] for obj in call_args])
self.assertEqual(len(mock_buckets), 1)
self.assertEqual(list(mock_buckets)[0], bucket_name)
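# Informing-loop readiness: WGS members whose samples pass QC become ready;
# the config-driven limit caps how many flags are set per run.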
def test_calculate_informing_loop_ready_flags(self):
num_participants = 4
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
for num in range(num_participants):
plus_num = clock.CLOCK.now() + datetime.timedelta(minutes=num)
plus_num = plus_num.replace(microsecond=0)
with FakeClock(plus_num):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1
)
stored_sample = self.data_generator.create_database_biobank_stored_sample(
biobankId=summary.biobankId,
biobankOrderIdentifier=self.fake.pyint()
)
collection_site = self.data_generator.create_database_site(
siteType='Clinic'
)
order = self.data_generator.create_database_biobank_order(
collectedSiteId=collection_site.siteId,
participantId=summary.participantId,
finalizedTime=plus_num
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="1",
)
self.data_generator.create_database_biobank_order_identifier(
value=stored_sample.biobankOrderIdentifier,
biobankOrderId=order.biobankOrderId,
system="2",
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
qcStatus=GenomicQcStatus.PASS,
gcManifestSampleSource='Whole Blood',
collectionTubeId=stored_sample.biobankStoredSampleId
)
self.data_generator.create_database_genomic_gc_validation_metrics(
genomicSetMemberId=member.id,
sexConcordance='True',
drcFpConcordance='Pass',
drcSexConcordance='Pass',
processingStatus='Pass'
)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 0 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is None for obj in current_set_members))
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
# without the CALCULATE_READY_FLAG_LIMIT config set, the controller returns early and nothing changes
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants)
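# cap the calculation at two members per run via config override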
calculation_limit = 2
config.override_setting(config.CALCULATE_READY_FLAG_LIMIT, [calculation_limit])
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(any(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(any(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
current_loops_set = [obj for obj in current_set_members if obj.informingLoopReadyFlag == 1
and obj.informingLoopReadyFlagModified is not None]
self.assertEqual(len(current_loops_set), calculation_limit)
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), num_participants // 2)
with GenomicJobController(GenomicJob.CALCULATE_INFORMING_LOOP_READY) as controller:
controller.calculate_informing_loop_ready_flags()
current_set_members = self.member_dao.get_all()
self.assertTrue(all(obj.informingLoopReadyFlag == 1 for obj in current_set_members))
self.assertTrue(all(obj.informingLoopReadyFlagModified is not None for obj in current_set_members))
members_for_ready_loop = self.member_dao.get_members_for_informing_loop_ready()
self.assertEqual(len(members_for_ready_loop), 0)
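# Withdrawn (EARLY_OUT) participants with results in flight should produce
# notification emails (one GEM, one HEALTH) and one withdrawal record each.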
@mock.patch('rdr_service.services.email_service.EmailService.send_email')
def test_getting_results_withdrawn(self, email_mock):
num_participants = 4
result_withdrawal_dao = GenomicResultWithdrawalsDao()
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gen_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.AW1_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids = []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY,
gemA1ManifestJobRunId=gen_job_run.id if num % 2 == 0 else None
)
self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_WGS,
cvlW1ilHdrJobRunId=gen_job_run.id
)
pids.append(summary.participantId)
config.override_setting(config.RDR_GENOMICS_NOTIFICATION_EMAIL, '[email protected]')
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# two emails expected: one for GEM results, one for HEALTH results
self.assertEqual(email_mock.call_count, 2)
call_args = email_mock.call_args_list
self.assertTrue(any('GEM' in call.args[0].subject for call in call_args))
self.assertTrue(any('HEALTH' in call.args[0].subject for call in call_args))
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
all_withdrawal_records = result_withdrawal_dao.get_all()
self.assertTrue(len(all_withdrawal_records) == len(pids))
self.assertTrue(all(obj.participant_id in pids for obj in all_withdrawal_records))
array_results = list(filter(lambda x: x.array_results == 1, all_withdrawal_records))
# should only be 2
self.assertEqual(len(array_results), 2)
cvl_results = list(filter(lambda x: x.cvl_results == 1, all_withdrawal_records))
# should be 4 for num of participants
self.assertEqual(len(cvl_results), num_participants)
with GenomicJobController(GenomicJob.RESULTS_PIPELINE_WITHDRAWALS) as controller:
controller.check_results_withdrawals()
# still two: no new withdrawal records, so no additional emails are sent
self.assertEqual(email_mock.call_count, 2)
job_runs = self.job_run_dao.get_all()
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.RESULTS_PIPELINE_WITHDRAWALS, job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
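# GEM A2 results: members updated with an A2 manifest run each get a
# GEM_RPT_READY report state exactly once; a rerun finds nothing new.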
def test_gem_results_to_report_state(self):
num_participants = 8
gen_set = self.data_generator.create_database_genomic_set(
genomicSetName=".",
genomicSetCriteria=".",
genomicSetVersion=1
)
gem_a2_job_run = self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.GEM_A2_MANIFEST,
startTime=clock.CLOCK.now(),
runResult=GenomicSubProcessResult.SUCCESS
)
pids_to_update, member_ids = [], []
for num in range(num_participants):
summary = self.data_generator.create_database_participant_summary(
consentForStudyEnrollment=1,
consentForGenomicsROR=1,
withdrawalStatus=WithdrawalStatus.EARLY_OUT
)
member = self.data_generator.create_database_genomic_set_member(
genomicSetId=gen_set.id,
participantId=summary.participantId,
genomeType=config.GENOME_TYPE_ARRAY
)
if num % 2 == 0:
member_ids.append(member.id)
pids_to_update.append(summary.participantId)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 2)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[0]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
current_members = self.member_dao.get_all()
# update 4 members so they now qualify for report-state creation
for member in current_members:
if member.participantId in pids_to_update:
member.gemA2ManifestJobRunId = gem_a2_job_run.id
member.genomicWorkflowState = GenomicWorkflowState.GEM_RPT_READY
self.member_dao.update(member)
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 3)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[1]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.SUCCESS)
current_gem_report_states = self.report_state_dao.get_all()
self.assertEqual(len(current_gem_report_states), len(pids_to_update))
self.assertTrue(all(obj.event_type == 'result_ready' for obj in current_gem_report_states))
self.assertTrue(all(obj.event_authored_time is not None for obj in current_gem_report_states))
self.assertTrue(all(obj.module == 'gem' for obj in current_gem_report_states))
self.assertTrue(
all(obj.genomic_report_state == GenomicReportState.GEM_RPT_READY for obj in current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_report_state_str == GenomicReportState.GEM_RPT_READY.name for obj in
current_gem_report_states)
)
self.assertTrue(
all(obj.genomic_set_member_id in member_ids for obj in
current_gem_report_states)
)
# report states already exist for those 4 members, so the rerun returns none
with GenomicJobController(GenomicJob.GEM_RESULT_REPORTS) as controller:
controller.gem_results_to_report_state()
current_job_runs = self.job_run_dao.get_all()
self.assertEqual(len(current_job_runs), 4)
current_job_run = list(filter(lambda x: x.jobId == GenomicJob.GEM_RESULT_REPORTS, current_job_runs))[2]
self.assertTrue(current_job_run.runResult == GenomicSubProcessResult.NO_RESULTS)
self.clear_table_after_test('genomic_member_report_state')
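# Reconcile informing-loop decisions recorded in user event metrics against
# the message-broker genomic_informing_loop records.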
def test_reconcile_informing_loop(self):
event_dao = UserEventMetricsDao()
event_dao.truncate() # for test suite
il_dao = GenomicInformingLoopDao()
for pid in range(8):
self.data_generator.create_database_participant(participantId=1 + pid, biobankId=1 + pid)
# Set up initial job run ID
self.data_generator.create_database_genomic_job_run(
jobId=GenomicJob.METRICS_FILE_INGEST,
startTime=clock.CLOCK.now()
)
# create genomic set
self.data_generator.create_database_genomic_set(
genomicSetName='test',
genomicSetCriteria='.',
genomicSetVersion=1
)
# insert set members
for b in ["aou_array", "aou_wgs"]:
for i in range(1, 9):
self.data_generator.create_database_genomic_set_member(
participantId=i,
genomicSetId=1,
biobankId=i,
collectionTubeId=100 + i,
sampleId=10 + i,
genomeType=b,
)
# Set up ingested metrics data
events = ['gem.informing_loop.started',
'gem.informing_loop.screen8_no',
'gem.informing_loop.screen8_yes',
'hdr.informing_loop.started',
'gem.informing_loop.screen3',
'pgx.informing_loop.screen8_no',
'hdr.informing_loop.screen10_no']
for p in range(4):
for i in range(len(events)):
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=p + 1,
created_at=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i),
event_name=events[i],
run_id=1,
ignore_flag=0,
)
# Set up informing loop from message broker records
decisions = [None, 'no', 'yes']
for p in range(3):
for i in range(2):
self.data_generator.create_database_genomic_informing_loop(
message_record_id=i,
event_type='informing_loop_started' if i == 0 else 'informing_loop_decision',
module_type='gem',
participant_id=p + 1,
decision_value=decisions[i],
sample_id=100 + p,
event_authored_time=datetime.datetime(2021, 12, 29, 00) + datetime.timedelta(hours=i)
)
# Test for no message but yes user event
self.data_generator.create_database_genomic_user_event_metrics(
created=clock.CLOCK.now(),
modified=clock.CLOCK.now(),
participant_id=6,
created_at=datetime.datetime(2021, 12, 29, 00),
event_name='gem.informing_loop.screen8_yes',
run_id=1,
ignore_flag=0,
)
# Run reconcile job
genomic_pipeline.reconcile_informing_loop_responses()
# Test mismatched GEM data ingested correctly
pid_list = [1,
[remaining text truncated by the dataset viewer]

Remaining preview rows (blob_id | language | text):

addf92a3d4060fa9464a802a4a4378cf9eeadde4 | Python | "<|begin_of_text|># -*- coding: utf-8 -*-\n# This file is auto-generated, don't edit it. Thanks.\nfr(...TRUNCATED)"
87baaf4a1b48fa248c65d26cc44e819a2ede1140 | Python | "<|begin_of_text|># Python library import\nimport asyncio, asyncssh, logging\n\n# Module logging log(...TRUNCATED)"
32e904a39d03d3166369420b49db0b9b118110a3 | Python | "<|begin_of_text|>import hashlib\nimport json\nimport logging\nimport os\nimport urllib.parse\nimpor(...TRUNCATED)"
1721bba2cae1e330bffeb9df05341df9522ff885 | Python | "<|begin_of_text|>import ROOT\nfrom PhysicsTools.NanoAODTools.postprocessing.framework.datamodel imp(...TRUNCATED)"
202670314ad28685aaa296dce4b5094daab3f47a | Python | "<|begin_of_text|>#\n# PySNMP MIB module Nortel-MsCarrier-MscPassport-AtmEbrMIB (http://snmplabs.com(...TRUNCATED)"
b8e18877af990c533c642d4937354198a4676419 | Python | "<|begin_of_text|>\"\"\"autogenerated by genpy from arm_navigation_msgs/GetPlanningSceneRequest.msg.(...TRUNCATED)"
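Rows like those above can be pulled programmatically with the `datasets` library. The sketch below is a hedged example, not an official loader: the repository id is a hypothetical placeholder (the preview does not name the dataset), while the column names blob_id, language, and text come from the schema shown here.

# A minimal sketch, assuming this preview backs a Hugging Face dataset with
# columns blob_id / language / text. The repo id below is a hypothetical
# placeholder -- substitute the real dataset path.
from datasets import load_dataset

ds = load_dataset("some-user/python-code-corpus", split="train", streaming=True)

for row in ds.take(3):  # stream a few rows without downloading the full set
    print(row["blob_id"], row["language"], len(row["text"]))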